var/home/core/zuul-output/logs/kubelet.log:
Dec 04 17:27:49 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 04 17:27:49 crc restorecon[4603]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 04 17:27:49 crc restorecon[4603]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 17:27:49 crc 
restorecon[4603]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 04 17:27:49 crc 
restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc 
restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc 
restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 04 17:27:49 
crc restorecon[4603]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 
17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]:
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]:
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 
17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc 
restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 17:27:49 crc restorecon[4603]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 17:27:49 crc restorecon[4603]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 04 17:27:49 crc restorecon[4603]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Dec 04 17:27:50 crc kubenswrapper[4631]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 04 17:27:50 crc kubenswrapper[4631]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Dec 04 17:27:50 crc kubenswrapper[4631]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 04 17:27:50 crc kubenswrapper[4631]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 04 17:27:50 crc kubenswrapper[4631]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 04 17:27:50 crc kubenswrapper[4631]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.108537    4631 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111824    4631 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111845    4631 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111850    4631 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111855    4631 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111861    4631 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111866    4631 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111872    4631 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111878    4631 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111883    4631 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111887    4631 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111891    4631 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111894    4631 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111898    4631 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111902    4631 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111907    4631 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111911 4631 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111915 4631 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111919 4631 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111923 4631 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111930 4631 feature_gate.go:330] unrecognized feature gate: Example
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111935 4631 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111940 4631 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111945 4631 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111949 4631 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111953 4631 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111956 4631 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111960 4631 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111963 4631 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111967 4631 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111970 4631 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111974 4631 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111977 4631 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111981 4631 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111985 4631 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111988 4631 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111992 4631 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111995 4631 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.111999 4631 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112004 4631 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112008 4631 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112011 4631 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112015 4631 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112019 4631 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112022 4631 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112026 4631 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112030 4631 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112034 4631 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112038 4631 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112042 4631 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112047 4631 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112052 4631 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112056 4631 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112060 4631 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112064 4631 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112068 4631 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112073 4631 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112076 4631 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112080 4631 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112083 4631 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112087 4631 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112090 4631 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112093 4631 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112097 4631 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112100 4631 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112104 4631 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112108 4631 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112111 4631 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112115 4631 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112118 4631 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112122 4631 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.112128 4631 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112199 4631 flags.go:64] FLAG: --address="0.0.0.0"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112209 4631 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112216 4631 flags.go:64] FLAG: --anonymous-auth="true"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112221 4631 flags.go:64] FLAG: --application-metrics-count-limit="100"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112227 4631 flags.go:64] FLAG: --authentication-token-webhook="false"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112231 4631 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112236 4631 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112241 4631 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112245 4631 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112250 4631 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112254 4631 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112258 4631 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112262 4631 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112266 4631 flags.go:64] FLAG: --cgroup-root=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112270 4631 flags.go:64] FLAG: --cgroups-per-qos="true"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112274 4631 flags.go:64] FLAG: --client-ca-file=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112279 4631 flags.go:64] FLAG: --cloud-config=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112283 4631 flags.go:64] FLAG: --cloud-provider=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112287 4631 flags.go:64] FLAG: --cluster-dns="[]"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112292 4631 flags.go:64] FLAG: --cluster-domain=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112296 4631 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112300 4631 flags.go:64] FLAG: --config-dir=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112304 4631 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112308 4631 flags.go:64] FLAG: --container-log-max-files="5"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112313 4631 flags.go:64] FLAG: --container-log-max-size="10Mi"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112317 4631 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112322 4631 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112326 4631 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112330 4631 flags.go:64] FLAG: --contention-profiling="false"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112334 4631 flags.go:64] FLAG: --cpu-cfs-quota="true"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112339 4631 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112344 4631 flags.go:64] FLAG: --cpu-manager-policy="none"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112349 4631 flags.go:64] FLAG: --cpu-manager-policy-options=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112354 4631 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112358 4631 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112362 4631 flags.go:64] FLAG: --enable-debugging-handlers="true"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112367 4631 flags.go:64] FLAG: --enable-load-reader="false"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112385 4631 flags.go:64] FLAG: --enable-server="true"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112389 4631 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112398 4631 flags.go:64] FLAG: --event-burst="100"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112403 4631 flags.go:64] FLAG: --event-qps="50"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112408 4631 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112419 4631 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112426 4631 flags.go:64] FLAG: --eviction-hard=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112433 4631 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112438 4631 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112443 4631 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112448 4631 flags.go:64] FLAG: --eviction-soft=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112453 4631 flags.go:64] FLAG: --eviction-soft-grace-period=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112458 4631 flags.go:64] FLAG: --exit-on-lock-contention="false"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112463 4631 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112468 4631 flags.go:64] FLAG: --experimental-mounter-path=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112473 4631 flags.go:64] FLAG: --fail-cgroupv1="false"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112478 4631 flags.go:64] FLAG: --fail-swap-on="true"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112485 4631 flags.go:64] FLAG: --feature-gates=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112491 4631 flags.go:64] FLAG: --file-check-frequency="20s"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112497 4631 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112503 4631 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112508 4631 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112513 4631 flags.go:64] FLAG: --healthz-port="10248"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112518 4631 flags.go:64] FLAG: --help="false"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112524 4631 flags.go:64] FLAG: --hostname-override=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112529 4631 flags.go:64] FLAG: --housekeeping-interval="10s"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112536 4631 flags.go:64] FLAG: --http-check-frequency="20s"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112557 4631 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112563 4631 flags.go:64] FLAG: --image-credential-provider-config=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112568 4631 flags.go:64] FLAG: --image-gc-high-threshold="85"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112573 4631 flags.go:64] FLAG: --image-gc-low-threshold="80"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112583 4631 flags.go:64] FLAG: --image-service-endpoint=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112588 4631 flags.go:64] FLAG: --kernel-memcg-notification="false"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112593 4631 flags.go:64] FLAG: --kube-api-burst="100"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112598 4631 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112604 4631 flags.go:64] FLAG: --kube-api-qps="50"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112609 4631 flags.go:64] FLAG: --kube-reserved=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112614 4631 flags.go:64] FLAG: --kube-reserved-cgroup=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112618 4631 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112622 4631 flags.go:64] FLAG: --kubelet-cgroups=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112627 4631 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112631 4631 flags.go:64] FLAG: --lock-file=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112636 4631 flags.go:64] FLAG: --log-cadvisor-usage="false"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112640 4631 flags.go:64] FLAG: --log-flush-frequency="5s"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112645 4631 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112651 4631 flags.go:64] FLAG: --log-json-split-stream="false"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112655 4631 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112660 4631 flags.go:64] FLAG: --log-text-split-stream="false"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112664 4631 flags.go:64] FLAG: --logging-format="text"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112668 4631 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112672 4631 flags.go:64] FLAG: --make-iptables-util-chains="true"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112676 4631 flags.go:64] FLAG: --manifest-url=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112680 4631 flags.go:64] FLAG: --manifest-url-header=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112687 4631 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112691 4631 flags.go:64] FLAG: --max-open-files="1000000"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112696 4631 flags.go:64] FLAG: --max-pods="110"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112700 4631 flags.go:64] FLAG: --maximum-dead-containers="-1"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112705 4631 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112709 4631 flags.go:64] FLAG: --memory-manager-policy="None"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112713 4631 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112717 4631 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112721 4631 flags.go:64] FLAG: --node-ip="192.168.126.11"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112725 4631 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112735 4631 flags.go:64] FLAG: --node-status-max-images="50"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112739 4631 flags.go:64] FLAG: --node-status-update-frequency="10s"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112743 4631 flags.go:64] FLAG: --oom-score-adj="-999"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112748 4631 flags.go:64] FLAG: --pod-cidr=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112753 4631 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112760 4631 flags.go:64] FLAG: --pod-manifest-path=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112764 4631 flags.go:64] FLAG: --pod-max-pids="-1"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112768 4631 flags.go:64] FLAG: --pods-per-core="0"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112772 4631 flags.go:64] FLAG: --port="10250"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112776 4631 flags.go:64] FLAG: --protect-kernel-defaults="false"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112780 4631 flags.go:64] FLAG: --provider-id=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112784 4631 flags.go:64] FLAG: --qos-reserved=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112788 4631 flags.go:64] FLAG: --read-only-port="10255"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112792 4631 flags.go:64] FLAG: --register-node="true"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112796 4631 flags.go:64] FLAG: --register-schedulable="true"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112800 4631 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112807 4631 flags.go:64] FLAG: --registry-burst="10"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112813 4631 flags.go:64] FLAG: --registry-qps="5"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112817 4631 flags.go:64] FLAG: --reserved-cpus=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112821 4631 flags.go:64] FLAG: --reserved-memory=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112826 4631 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112830 4631 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112835 4631 flags.go:64] FLAG: --rotate-certificates="false"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112839 4631 flags.go:64] FLAG: --rotate-server-certificates="false"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112843 4631 flags.go:64] FLAG: --runonce="false"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112847 4631 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112851 4631 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112856 4631 flags.go:64] FLAG: --seccomp-default="false"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112859 4631 flags.go:64] FLAG: --serialize-image-pulls="true"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112863 4631 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112867 4631 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112872 4631 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112876 4631 flags.go:64] FLAG: --storage-driver-password="root"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112880 4631 flags.go:64] FLAG: --storage-driver-secure="false"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112885 4631 flags.go:64] FLAG: --storage-driver-table="stats"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112890 4631 flags.go:64] FLAG: --storage-driver-user="root"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112894 4631 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112900 4631 flags.go:64] FLAG: --sync-frequency="1m0s"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112905 4631 flags.go:64] FLAG: --system-cgroups=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112910 4631 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112917 4631 flags.go:64] FLAG: --system-reserved-cgroup=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112921 4631 flags.go:64] FLAG: --tls-cert-file=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112925 4631 flags.go:64] FLAG: --tls-cipher-suites="[]"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112930 4631 flags.go:64] FLAG: --tls-min-version=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112935 4631 flags.go:64] FLAG: --tls-private-key-file=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112939 4631 flags.go:64] FLAG: --topology-manager-policy="none"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112943 4631 flags.go:64] FLAG: --topology-manager-policy-options=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112947 4631 flags.go:64] FLAG: --topology-manager-scope="container"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112951 4631 flags.go:64] FLAG: --v="2"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112957 4631 flags.go:64] FLAG: --version="false"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112964 4631 flags.go:64] FLAG: --vmodule=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112970 4631 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.112976 4631 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113078 4631 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113085 4631 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113090 4631 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113095 4631 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113100 4631 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113104 4631 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113108 4631 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113112 4631 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
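[Editorial note] The FLAG: dump above is the kubelet logging every command-line value at --v=2 before the --config file is overlaid. It includes the deprecated --minimum-container-ttl-duration="6m0s", whose suggested replacement (per the deprecation warning earlier) is eviction thresholds in the config file. A minimal sketch, assuming the kubelet.config.k8s.io/v1beta1 schema; the threshold values here are purely illustrative and do not come from this log:

# hypothetical eviction settings replacing --minimum-container-ttl-duration
evictionHard:
  memory.available: "100Mi"
  nodefs.available: "10%"
evictionSoft:
  memory.available: "200Mi"
evictionSoftGracePeriod:
  memory.available: "1m30s"
evictionPressureTransitionPeriod: 5m0s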
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113117 4631 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113121 4631 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113125 4631 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113129 4631 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113133 4631 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113137 4631 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113140 4631 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113144 4631 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113148 4631 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113152 4631 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113157 4631 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113161 4631 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113165 4631 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113170 4631 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113174 4631 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113180 4631 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113185 4631 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113190 4631 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113194 4631 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113207 4631 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113212 4631 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113221 4631 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113225 4631 feature_gate.go:330] unrecognized feature gate: Example
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113229 4631 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113234 4631 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113239 4631 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113243 4631 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113248 4631 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113252 4631 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113256 4631 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113260 4631 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113267 4631 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113272 4631 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113277 4631 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113281 4631 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113286 4631 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113290 4631 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113294 4631 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113299 4631 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113303 4631 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113308 4631 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113313 4631 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113317 4631 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113321 4631 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113326 4631 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113330 4631 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113334 4631 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113338 4631 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113342 4631 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113347 4631 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113351 4631 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113359 4631 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113364 4631 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113389 4631 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113394 4631 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113413 4631 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113418 4631 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113423 4631 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113427 4631 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113432 4631 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113436 4631 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113441 4631 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.113446 4631 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.113461 4631 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.122548 4631 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.122765 4631 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.122923 4631 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.122997 4631 feature_gate.go:330] unrecognized feature gate: Example
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.123067 4631 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.123113 4631 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.123156 4631 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.123205 4631 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.123249 4631 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.123292 4631 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.123343 4631 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.123434 4631 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.123510 4631 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.123592 4631 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.123671 4631 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.123754 4631 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.123821 4631 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.123871 4631 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.123928 4631 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.123988 4631 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.124041 4631 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.124096 4631 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.124149 4631 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.124203 4631 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.124259 4631 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.124308 4631 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.124367 4631 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.124450 4631 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.124507 4631 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.124552 4631 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.124595 4631 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.124638 4631 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.124682 4631 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.124730 4631 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.124784 4631 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.124905 4631 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.124966 4631 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.125011 4631 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.125054 4631 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.125097 4631 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.125139 4631 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.125183 4631 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.125234 4631 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.125277 4631 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.125320 4631 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.125365 4631 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.125447 4631 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.125495 4631 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.125538 4631 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.125604 4631 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.125653 4631 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.125700 4631 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.125755 4631 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.125802 4631 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.125850 4631 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.125902 4631 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
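[Editorial note] The repeated "unrecognized feature gate" warnings come from OpenShift cluster-level gates being passed to the kubelet, which only recognizes upstream Kubernetes gates; the recognized subset is what appears in the "feature gates: {map[...]}" summary logged above. In the config file, these are set via the featureGates field. A minimal sketch, assuming the kubelet.config.k8s.io/v1beta1 schema and mirroring only the gates the summary shows as explicitly enabled:

featureGates:
  CloudDualStackNodeIPs: true
  DisableKubeletCloudCredentialProviders: true
  KMSv1: true
  ValidatingAdmissionPolicy: true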
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.125968 4631 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.126019 4631 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.126063 4631 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.126111 4631 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.126165 4631 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.126217 4631 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.126266 4631 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.126320 4631 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.126554 4631 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.126626 4631 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.126679 4631 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.126741 4631 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.126794 4631 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.126844 4631 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.126895 4631 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.126947 4631 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.127005 4631 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.127061 4631 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.127298 4631 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.127361 4631 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.127445 4631 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.127498 4631 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.127549 4631 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.127606 4631 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.127663 4631 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.127734 4631 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.127797 4631 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.127849 4631 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.127899 4631 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.127950 4631 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.128003 4631 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.128091 4631 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.128166 4631 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.128234 4631 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.128286 4631 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.128332 4631 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.128399 4631 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.128470 4631 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.128537 4631 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.128600 4631 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.128681 4631 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.128744 4631 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.128806 4631 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.128871 4631 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.128930 4631 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.128983 4631 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.129028 4631 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.129070 4631 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.129117 4631 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.129160 4631 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.129202 4631 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.129244 4631 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.129290 4631 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.129334 4631 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.129403 4631 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.129487 4631 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.129564 4631 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.129631 4631 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.129694 4631 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.129753 4631 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.129829 4631 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.129886 4631 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.129947 4631 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.129992 4631 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.130034 4631 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.130076 4631 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.130125 4631 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.130168 4631 feature_gate.go:330] unrecognized feature gate: Example
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.130219 4631 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.130280 4631 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.130336 4631 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.130411 4631 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.130492 4631 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.130575 4631 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.130657 4631 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.130728 4631 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.130780 4631 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.130823 4631 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.130866 4631 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.130915 4631 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.130964 4631 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.131007 4631 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.131049 4631 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.131091 4631 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.131135 4631 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.131184 4631 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.131228 4631 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.131332 4631 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.131400 4631 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.131451 4631 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.131915 4631 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.134657 4631 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.134807 4631 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.135367 4631 server.go:997] "Starting client certificate rotation"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.135458 4631 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.135656 4631 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-23 02:38:45.361402374 +0000 UTC
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.135759 4631 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.141500 4631 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 04 17:27:50 crc kubenswrapper[4631]: E1204 17:27:50.144409 4631 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.145870 4631 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.151678 4631 log.go:25] "Validated CRI v1 runtime API"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.165016 4631 log.go:25] "Validated CRI v1 image API"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.166107 4631 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.168627 4631 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-04-17-22-00-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.168652 4631 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:45 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:42 fsType:tmpfs blockSize:0}]
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.178215 4631 manager.go:217] Machine: {Timestamp:2025-12-04 17:27:50.177259897 +0000 UTC m=+0.209501905 CPUVendorID:AuthenticAMD NumCores:8 NumPhysicalCores:1 NumSockets:8 CpuFrequency:2800000 MemoryCapacity:25199476736 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:c04d3b5b-9d92-423d-a922-0c9769c3a8b4 BootID:45733624-1f47-45a2-a6e9-c03f47562993 Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:12599738368 Type:vfs Inodes:3076108 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:5039898624 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:12599738368 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:45 Capacity:2519945216 Type:vfs Inodes:615221 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:42 Capacity:1073741824 Type:vfs Inodes:3076108 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:429496729600 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:1c:ec:8c Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:1c:ec:8c Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:ed:62:b0 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:f7:c3:e5 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:77:ad:70 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:41:2a:f8 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:d2:82:9e:c3:1a:37 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:8e:17:a6:79:08:48 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:25199476736 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.178389 4631 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.178512 4631 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.178812 4631 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.178956 4631 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.178986 4631 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.179161 4631 topology_manager.go:138] "Creating topology manager with none policy"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.179177 4631 container_manager_linux.go:303] "Creating device plugin manager"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.179344 4631 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.179402 4631 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.179668 4631 state_mem.go:36] "Initialized new in-memory state store"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.179746 4631 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.180576 4631 kubelet.go:418] "Attempting to sync node with API server"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.180620 4631 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.180646 4631 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.180663 4631 kubelet.go:324] "Adding apiserver pod source"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.180679 4631 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.182452 4631 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.183575 4631 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.183909 4631 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused
Dec 04 17:27:50 crc kubenswrapper[4631]: E1204 17:27:50.184130 4631 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError"
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.184344 4631 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused
Dec 04 17:27:50 crc kubenswrapper[4631]: E1204 17:27:50.184482 4631 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.185067 4631 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.185861 4631 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.185887 4631 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.185895 4631 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.185905 4631 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.185916 4631 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.185923 4631 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.185929 4631 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.185940 4631 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.185947 4631 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.185954 4631 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.185963 4631 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.185970 4631 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.186110 4631 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.186588 4631 server.go:1280] "Started kubelet"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.186711 4631 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.186716 4631 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.188288 4631 server.go:460] "Adding debug handlers to kubelet server"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.188391 4631 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.188502 4631 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.188524 4631 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.188672 4631 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 02:59:59.584698948 +0000 UTC
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.189799 4631 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 897h32m9.394910709s for next certificate rotation
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.188918 4631 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.188904 4631 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.189942 4631 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 04 17:27:50 crc kubenswrapper[4631]: E1204 17:27:50.189212 4631 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 04 17:27:50 crc systemd[1]: Started Kubernetes Kubelet.
Dec 04 17:27:50 crc kubenswrapper[4631]: E1204 17:27:50.192167 4631 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" interval="200ms"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.190022 4631 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.192493 4631 factory.go:55] Registering systemd factory
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.192548 4631 factory.go:221] Registration of the systemd container factory successfully
Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.192490 4631 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused
Dec 04 17:27:50 crc kubenswrapper[4631]: E1204 17:27:50.193066 4631 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError"
Dec 04 17:27:50 crc kubenswrapper[4631]: E1204 17:27:50.192857 4631 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.194:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187e134024792778 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-04 17:27:50.186567544 +0000 UTC m=+0.218809542,LastTimestamp:2025-12-04 17:27:50.186567544 +0000 UTC m=+0.218809542,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.194659 4631 factory.go:153] Registering CRI-O factory
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.194685 4631 factory.go:221] Registration of the crio container factory successfully
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.194763 4631 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.194800 4631 factory.go:103] Registering Raw factory
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.194817 4631 manager.go:1196] Started watching for new ooms in manager
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.195576 4631 manager.go:319] Starting recovery of all containers
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.201213 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.201944 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.201968 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.201997 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.202012 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.202063 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.202079 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.202092 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.202114 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.202127 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.202145 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.202164 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.202183 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.202208 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.202229 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.202245 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.202264 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.202277 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.202290 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.202307 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.202324 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.202343 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.202359 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.202390 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.202415 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.202428 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.206061 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.207415 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.207460 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.207481 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.207502 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.207525 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.207547 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.207570 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.207627 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.207648 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.207672 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.207690 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.207713 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.207732 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.207754 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.207777 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.207798 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.207821 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.207839 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.207859 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.207885 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.207907 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.216352 4631 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.216921 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.216972 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.216995 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217017 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217049 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217085 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217110 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217134 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217155 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217178 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217201 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217221 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217243 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217260 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217277 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217298 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217316 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217334 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217352 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217392 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217413 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217430 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217447 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217508 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217530 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217546 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217563 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217580 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217597 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217623 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217641 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217657 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217674 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217690 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217706 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217723 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217738 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217756 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217774 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217791 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217807 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217825 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217843 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217861 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217878 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217895 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217911 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217928 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217946 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217962 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217980 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.217995 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218012 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218028 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218044 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218059 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218086 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218103 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218123 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218141 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218163 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218181 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218199 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218219 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218237 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218255 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218271 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218289 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218307 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218327 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218344 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218360 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218400 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218420 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218435 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218453 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218478 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218494 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218510 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218527 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218543 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218560 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218577 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218595 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218615 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218631 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218647 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218663 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218681 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218699 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218715 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218732 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218748 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218765 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218782 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218799 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod=""
podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218817 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218835 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218853 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218870 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218890 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218908 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218926 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218942 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218964 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.218983 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219000 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219018 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219034 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219053 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219071 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219091 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219109 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219127 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219155 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219175 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219193 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219210 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219229 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219246 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219263 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219279 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219296 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219312 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219345 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219360 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219397 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219413 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219430 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219448 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219463 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219478 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219500 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219532 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219551 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219569 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219600 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219630 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219660 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219681 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" 
volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219697 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219717 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219733 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219748 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219762 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219779 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219794 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219810 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219828 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219843 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219857 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219872 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219891 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219904 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219918 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219933 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219947 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219961 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219977 4631 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.219992 4631 reconstruct.go:97] "Volume reconstruction finished" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.220003 4631 reconciler.go:26] "Reconciler: start to sync state" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.220900 4631 manager.go:324] Recovery completed Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.229959 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.231309 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.231354 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.231506 4631 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.232208 4631 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.232218 4631 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.232234 4631 state_mem.go:36] "Initialized new in-memory state store" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.236488 4631 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.238036 4631 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.238086 4631 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 04 17:27:50 crc kubenswrapper[4631]: I1204 17:27:50.238115 4631 kubelet.go:2335] "Starting kubelet main sync loop" Dec 04 17:27:50 crc kubenswrapper[4631]: E1204 17:27:50.238174 4631 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 04 17:27:50 crc kubenswrapper[4631]: W1204 17:27:50.239470 4631 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Dec 04 17:27:50 crc kubenswrapper[4631]: E1204 17:27:50.239539 4631 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError" Dec 04 17:27:50 crc kubenswrapper[4631]: E1204 17:27:50.290870 4631 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 04 17:27:50 crc kubenswrapper[4631]: E1204 17:27:50.339228 4631 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Dec 04 17:27:50 crc kubenswrapper[4631]: E1204 17:27:50.391622 4631 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 04 17:27:50 crc kubenswrapper[4631]: E1204 17:27:50.393140 4631 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" interval="400ms" Dec 04 17:27:50 crc kubenswrapper[4631]: E1204 17:27:50.491973 4631 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 04 17:27:50 crc kubenswrapper[4631]: E1204 17:27:50.539654 4631 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Dec 04 17:27:50 crc kubenswrapper[4631]: E1204 17:27:50.592060 4631 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 04 17:27:50 crc kubenswrapper[4631]: E1204 17:27:50.692345 4631 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 04 17:27:50 crc 
kubenswrapper[4631]: E1204 17:27:50.793341 4631 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 04 17:27:50 crc kubenswrapper[4631]: E1204 17:27:50.793890 4631 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" interval="800ms" Dec 04 17:27:50 crc kubenswrapper[4631]: E1204 17:27:50.893986 4631 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 04 17:27:50 crc kubenswrapper[4631]: E1204 17:27:50.939987 4631 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Dec 04 17:27:50 crc kubenswrapper[4631]: E1204 17:27:50.994885 4631 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 04 17:27:51 crc kubenswrapper[4631]: E1204 17:27:51.095555 4631 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.189342 4631 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Dec 04 17:27:51 crc kubenswrapper[4631]: E1204 17:27:51.196637 4631 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 04 17:27:51 crc kubenswrapper[4631]: W1204 17:27:51.289407 4631 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Dec 04 17:27:51 crc kubenswrapper[4631]: E1204 17:27:51.289482 4631 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError" Dec 04 17:27:51 crc kubenswrapper[4631]: E1204 17:27:51.297717 4631 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 04 17:27:51 crc kubenswrapper[4631]: E1204 17:27:51.397833 4631 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.439287 4631 policy_none.go:49] "None policy: Start" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.440907 4631 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.440935 4631 state_mem.go:35] "Initializing new in-memory state store" Dec 04 17:27:51 crc kubenswrapper[4631]: E1204 17:27:51.499198 4631 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 04 17:27:51 crc kubenswrapper[4631]: W1204 17:27:51.534357 4631 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: 
connection refused Dec 04 17:27:51 crc kubenswrapper[4631]: E1204 17:27:51.534516 4631 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError" Dec 04 17:27:51 crc kubenswrapper[4631]: W1204 17:27:51.539639 4631 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Dec 04 17:27:51 crc kubenswrapper[4631]: E1204 17:27:51.539817 4631 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError" Dec 04 17:27:51 crc kubenswrapper[4631]: W1204 17:27:51.564936 4631 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Dec 04 17:27:51 crc kubenswrapper[4631]: E1204 17:27:51.565040 4631 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError" Dec 04 17:27:51 crc kubenswrapper[4631]: E1204 17:27:51.594947 4631 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" interval="1.6s" Dec 04 17:27:51 crc kubenswrapper[4631]: E1204 17:27:51.600552 4631 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.618272 4631 manager.go:334] "Starting Device Plugin manager" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.618339 4631 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.618356 4631 server.go:79] "Starting device plugin registration server" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.618976 4631 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.618997 4631 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.619178 4631 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.619294 4631 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.619304 4631 plugin_manager.go:118] "Starting Kubelet 
Plugin Manager" Dec 04 17:27:51 crc kubenswrapper[4631]: E1204 17:27:51.626170 4631 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.719703 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.721652 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.721803 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.721892 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.721991 4631 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 04 17:27:51 crc kubenswrapper[4631]: E1204 17:27:51.722716 4631 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.194:6443: connect: connection refused" node="crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.740807 4631 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.740907 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.742632 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.742667 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.742679 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.742799 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.743327 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.743465 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.743804 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.743843 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.743856 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.743984 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.744317 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.744445 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.744522 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.744548 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.744467 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.744678 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.744707 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.744718 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.744808 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.744927 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.744960 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.745590 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.745677 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.745747 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.745855 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.745876 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.745886 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.745989 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.746138 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.746203 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.746286 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.746322 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.746337 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.747200 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.747237 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.747250 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.747422 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.747456 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.747692 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.747748 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.747773 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.748579 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.748669 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.748731 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.843442 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.843537 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.843591 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.843630 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.843667 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.843703 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" 
(UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.843733 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.843769 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.843859 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.843897 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.843926 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.843953 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.844035 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.844100 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.844120 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 
17:27:51.923290 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.924635 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.924683 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.924693 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.924719 4631 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 04 17:27:51 crc kubenswrapper[4631]: E1204 17:27:51.925239 4631 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.194:6443: connect: connection refused" node="crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.945675 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.945736 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.945770 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.945792 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.945816 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.945836 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.945855 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod 
\"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.945876 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.945931 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.945943 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.945962 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.945982 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.945955 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.946020 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.945997 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.946049 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 
17:27:51.946069 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.946037 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.946092 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.946024 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.945966 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.946081 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.946038 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.946195 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.946193 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.946207 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.946247 4631 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.946248 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.946305 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 17:27:51 crc kubenswrapper[4631]: I1204 17:27:51.946334 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 04 17:27:52 crc kubenswrapper[4631]: I1204 17:27:52.069704 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 04 17:27:52 crc kubenswrapper[4631]: I1204 17:27:52.097584 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 04 17:27:52 crc kubenswrapper[4631]: W1204 17:27:52.098161 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-2726514ea6220dad4c70009e5c08230ec798e06785066eafe2c14564b9cdb0f6 WatchSource:0}: Error finding container 2726514ea6220dad4c70009e5c08230ec798e06785066eafe2c14564b9cdb0f6: Status 404 returned error can't find the container with id 2726514ea6220dad4c70009e5c08230ec798e06785066eafe2c14564b9cdb0f6 Dec 04 17:27:52 crc kubenswrapper[4631]: I1204 17:27:52.104995 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:27:52 crc kubenswrapper[4631]: I1204 17:27:52.127337 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 17:27:52 crc kubenswrapper[4631]: I1204 17:27:52.132268 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 17:27:52 crc kubenswrapper[4631]: W1204 17:27:52.154706 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-022dfc6d17aef7f7b13af297c7fcca8342a4444d0e3d7a9ec669e36a2fa6f342 WatchSource:0}: Error finding container 022dfc6d17aef7f7b13af297c7fcca8342a4444d0e3d7a9ec669e36a2fa6f342: Status 404 returned error can't find the container with id 022dfc6d17aef7f7b13af297c7fcca8342a4444d0e3d7a9ec669e36a2fa6f342 Dec 04 17:27:52 crc kubenswrapper[4631]: W1204 17:27:52.156537 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-37060ce5022fcec48024a886b65be0c40c843a370e66a2915e7a3c3385fb48e6 WatchSource:0}: Error finding container 37060ce5022fcec48024a886b65be0c40c843a370e66a2915e7a3c3385fb48e6: Status 404 returned error can't find the container with id 37060ce5022fcec48024a886b65be0c40c843a370e66a2915e7a3c3385fb48e6 Dec 04 17:27:52 crc kubenswrapper[4631]: W1204 17:27:52.165005 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-8f869f82dfdf669dc6ccea8ba8be213dadbfb12e968d57d16ab5e732fcc54e12 WatchSource:0}: Error finding container 8f869f82dfdf669dc6ccea8ba8be213dadbfb12e968d57d16ab5e732fcc54e12: Status 404 returned error can't find the container with id 8f869f82dfdf669dc6ccea8ba8be213dadbfb12e968d57d16ab5e732fcc54e12 Dec 04 17:27:52 crc kubenswrapper[4631]: W1204 17:27:52.169247 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-cb76398ba29c767f1b588fbedf802b3e6244810b0a04d1204a869ada7e2efec9 WatchSource:0}: Error finding container cb76398ba29c767f1b588fbedf802b3e6244810b0a04d1204a869ada7e2efec9: Status 404 returned error can't find the container with id cb76398ba29c767f1b588fbedf802b3e6244810b0a04d1204a869ada7e2efec9 Dec 04 17:27:52 crc kubenswrapper[4631]: I1204 17:27:52.171470 4631 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 04 17:27:52 crc kubenswrapper[4631]: E1204 17:27:52.172775 4631 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError" Dec 04 17:27:52 crc kubenswrapper[4631]: I1204 17:27:52.190018 4631 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Dec 04 17:27:52 crc kubenswrapper[4631]: I1204 17:27:52.246475 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"37060ce5022fcec48024a886b65be0c40c843a370e66a2915e7a3c3385fb48e6"} Dec 04 17:27:52 crc kubenswrapper[4631]: I1204 17:27:52.247896 4631 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"022dfc6d17aef7f7b13af297c7fcca8342a4444d0e3d7a9ec669e36a2fa6f342"} Dec 04 17:27:52 crc kubenswrapper[4631]: I1204 17:27:52.248966 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"2726514ea6220dad4c70009e5c08230ec798e06785066eafe2c14564b9cdb0f6"} Dec 04 17:27:52 crc kubenswrapper[4631]: I1204 17:27:52.250926 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"cb76398ba29c767f1b588fbedf802b3e6244810b0a04d1204a869ada7e2efec9"} Dec 04 17:27:52 crc kubenswrapper[4631]: I1204 17:27:52.251841 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"8f869f82dfdf669dc6ccea8ba8be213dadbfb12e968d57d16ab5e732fcc54e12"} Dec 04 17:27:52 crc kubenswrapper[4631]: I1204 17:27:52.325604 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:52 crc kubenswrapper[4631]: I1204 17:27:52.327174 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:52 crc kubenswrapper[4631]: I1204 17:27:52.327230 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:52 crc kubenswrapper[4631]: I1204 17:27:52.327240 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:52 crc kubenswrapper[4631]: I1204 17:27:52.327275 4631 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 04 17:27:52 crc kubenswrapper[4631]: E1204 17:27:52.327925 4631 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.194:6443: connect: connection refused" node="crc" Dec 04 17:27:53 crc kubenswrapper[4631]: W1204 17:27:53.072268 4631 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Dec 04 17:27:53 crc kubenswrapper[4631]: E1204 17:27:53.072890 4631 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError" Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.128761 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.131205 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.131273 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:53 crc kubenswrapper[4631]: 
I1204 17:27:53.131294 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.131340 4631 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 04 17:27:53 crc kubenswrapper[4631]: E1204 17:27:53.132074 4631 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.194:6443: connect: connection refused" node="crc" Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.190014 4631 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Dec 04 17:27:53 crc kubenswrapper[4631]: E1204 17:27:53.196600 4631 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" interval="3.2s" Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.257299 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52"} Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.259241 4631 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="80fe5b4ac3c8df8b43b2ea67550347a87ff6b6edabbadce4ae53d46cdc588eb9" exitCode=0 Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.259293 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"80fe5b4ac3c8df8b43b2ea67550347a87ff6b6edabbadce4ae53d46cdc588eb9"} Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.259340 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.260191 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.260225 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.260236 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.261534 4631 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="581273bead771845e5261f066fc22bbde1f8a36db2db091b00a9e008fcb181e5" exitCode=0 Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.261673 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.261676 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"581273bead771845e5261f066fc22bbde1f8a36db2db091b00a9e008fcb181e5"} Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.262891 4631 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.262923 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.262937 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.263309 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a"} Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.264810 4631 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0" exitCode=0 Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.264925 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0"} Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.265099 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.266021 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.266054 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:53 crc kubenswrapper[4631]: I1204 17:27:53.266064 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:53 crc kubenswrapper[4631]: W1204 17:27:53.379026 4631 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Dec 04 17:27:53 crc kubenswrapper[4631]: E1204 17:27:53.379490 4631 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError" Dec 04 17:27:54 crc kubenswrapper[4631]: W1204 17:27:54.185776 4631 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Dec 04 17:27:54 crc kubenswrapper[4631]: E1204 17:27:54.185889 4631 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError" Dec 04 17:27:54 crc kubenswrapper[4631]: I1204 17:27:54.189692 4631 csi_plugin.go:884] 
Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Dec 04 17:27:54 crc kubenswrapper[4631]: W1204 17:27:54.685700 4631 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Dec 04 17:27:54 crc kubenswrapper[4631]: E1204 17:27:54.685819 4631 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError" Dec 04 17:27:54 crc kubenswrapper[4631]: I1204 17:27:54.732723 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:54 crc kubenswrapper[4631]: I1204 17:27:54.734165 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:54 crc kubenswrapper[4631]: I1204 17:27:54.734204 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:54 crc kubenswrapper[4631]: I1204 17:27:54.734215 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:54 crc kubenswrapper[4631]: I1204 17:27:54.734240 4631 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 04 17:27:54 crc kubenswrapper[4631]: E1204 17:27:54.734741 4631 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.194:6443: connect: connection refused" node="crc" Dec 04 17:27:55 crc kubenswrapper[4631]: I1204 17:27:55.190301 4631 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Dec 04 17:27:55 crc kubenswrapper[4631]: E1204 17:27:55.511871 4631 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.194:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187e134024792778 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-04 17:27:50.186567544 +0000 UTC m=+0.218809542,LastTimestamp:2025-12-04 17:27:50.186567544 +0000 UTC m=+0.218809542,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 04 17:27:56 crc kubenswrapper[4631]: I1204 17:27:56.189328 4631 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Dec 04 17:27:56 crc 
kubenswrapper[4631]: E1204 17:27:56.397893 4631 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" interval="6.4s" Dec 04 17:27:56 crc kubenswrapper[4631]: I1204 17:27:56.428523 4631 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 04 17:27:56 crc kubenswrapper[4631]: E1204 17:27:56.429745 4631 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError" Dec 04 17:27:56 crc kubenswrapper[4631]: W1204 17:27:56.799707 4631 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Dec 04 17:27:56 crc kubenswrapper[4631]: E1204 17:27:56.799800 4631 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError" Dec 04 17:27:57 crc kubenswrapper[4631]: I1204 17:27:57.188997 4631 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Dec 04 17:27:57 crc kubenswrapper[4631]: I1204 17:27:57.275509 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:57 crc kubenswrapper[4631]: I1204 17:27:57.276475 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:57 crc kubenswrapper[4631]: I1204 17:27:57.276538 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:57 crc kubenswrapper[4631]: I1204 17:27:57.276562 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:57 crc kubenswrapper[4631]: I1204 17:27:57.934873 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:57 crc kubenswrapper[4631]: I1204 17:27:57.936989 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:57 crc kubenswrapper[4631]: I1204 17:27:57.937086 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:57 crc kubenswrapper[4631]: I1204 17:27:57.937107 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:57 crc kubenswrapper[4631]: I1204 17:27:57.937162 4631 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 04 17:27:57 crc kubenswrapper[4631]: E1204 17:27:57.938108 4631 kubelet_node_status.go:99] "Unable to 
register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.194:6443: connect: connection refused" node="crc" Dec 04 17:27:58 crc kubenswrapper[4631]: W1204 17:27:58.153411 4631 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Dec 04 17:27:58 crc kubenswrapper[4631]: E1204 17:27:58.153618 4631 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError" Dec 04 17:27:58 crc kubenswrapper[4631]: I1204 17:27:58.190027 4631 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Dec 04 17:27:59 crc kubenswrapper[4631]: I1204 17:27:59.189852 4631 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Dec 04 17:27:59 crc kubenswrapper[4631]: I1204 17:27:59.282930 4631 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52" exitCode=0 Dec 04 17:27:59 crc kubenswrapper[4631]: I1204 17:27:59.282997 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52"} Dec 04 17:27:59 crc kubenswrapper[4631]: I1204 17:27:59.283155 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:59 crc kubenswrapper[4631]: I1204 17:27:59.284422 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:59 crc kubenswrapper[4631]: I1204 17:27:59.284473 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:59 crc kubenswrapper[4631]: I1204 17:27:59.284497 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:59 crc kubenswrapper[4631]: I1204 17:27:59.287288 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:27:59 crc kubenswrapper[4631]: I1204 17:27:59.288579 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:27:59 crc kubenswrapper[4631]: I1204 17:27:59.288639 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:27:59 crc kubenswrapper[4631]: I1204 17:27:59.288663 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:27:59 crc kubenswrapper[4631]: W1204 17:27:59.894960 4631 reflector.go:561] k8s.io/client-go/informers/factory.go:160: 
failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Dec 04 17:27:59 crc kubenswrapper[4631]: E1204 17:27:59.895036 4631 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError" Dec 04 17:27:59 crc kubenswrapper[4631]: W1204 17:27:59.897611 4631 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Dec 04 17:27:59 crc kubenswrapper[4631]: E1204 17:27:59.897676 4631 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.194:6443: connect: connection refused" logger="UnhandledError" Dec 04 17:28:00 crc kubenswrapper[4631]: I1204 17:28:00.189744 4631 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.194:6443: connect: connection refused Dec 04 17:28:00 crc kubenswrapper[4631]: I1204 17:28:00.286235 4631 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="e708fabc929f262303c9acbb37958eb45e6758ee2fce2f788088a0c0c3897d44" exitCode=0 Dec 04 17:28:00 crc kubenswrapper[4631]: I1204 17:28:00.286289 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"e708fabc929f262303c9acbb37958eb45e6758ee2fce2f788088a0c0c3897d44"} Dec 04 17:28:00 crc kubenswrapper[4631]: I1204 17:28:00.286412 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:00 crc kubenswrapper[4631]: I1204 17:28:00.287532 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:00 crc kubenswrapper[4631]: I1204 17:28:00.287556 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:00 crc kubenswrapper[4631]: I1204 17:28:00.287565 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:00 crc kubenswrapper[4631]: I1204 17:28:00.291535 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"5556f68691980174d165f902e2983cffe12a923d267b5615c07f9dc7da73efa0"} Dec 04 17:28:00 crc kubenswrapper[4631]: I1204 17:28:00.291596 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:00 crc kubenswrapper[4631]: I1204 17:28:00.292469 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 
04 17:28:00 crc kubenswrapper[4631]: I1204 17:28:00.292496 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:00 crc kubenswrapper[4631]: I1204 17:28:00.292509 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:00 crc kubenswrapper[4631]: I1204 17:28:00.293643 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357"} Dec 04 17:28:00 crc kubenswrapper[4631]: I1204 17:28:00.293674 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95"} Dec 04 17:28:00 crc kubenswrapper[4631]: I1204 17:28:00.295524 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"17ca1bf820b90d0dcab27c3f59301d671ae001940de1b634174e371a77a5208c"} Dec 04 17:28:00 crc kubenswrapper[4631]: I1204 17:28:00.295554 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"2232c209adbed3161e31474c06d86e5184985a392f0e7f7c115889bf80ad5266"} Dec 04 17:28:00 crc kubenswrapper[4631]: I1204 17:28:00.311832 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f"} Dec 04 17:28:00 crc kubenswrapper[4631]: I1204 17:28:00.311873 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284"} Dec 04 17:28:01 crc kubenswrapper[4631]: I1204 17:28:01.317031 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a"} Dec 04 17:28:01 crc kubenswrapper[4631]: I1204 17:28:01.317597 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e"} Dec 04 17:28:01 crc kubenswrapper[4631]: I1204 17:28:01.320045 4631 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="d1bc78f97102d67744a9b8d893c23481857ca96c96612d53a8e34c47ad0ecfde" exitCode=0 Dec 04 17:28:01 crc kubenswrapper[4631]: I1204 17:28:01.320130 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"d1bc78f97102d67744a9b8d893c23481857ca96c96612d53a8e34c47ad0ecfde"} Dec 04 17:28:01 crc kubenswrapper[4631]: I1204 17:28:01.320345 4631 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:01 crc kubenswrapper[4631]: I1204 17:28:01.321935 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:01 crc kubenswrapper[4631]: I1204 17:28:01.322009 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:01 crc kubenswrapper[4631]: I1204 17:28:01.322024 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:01 crc kubenswrapper[4631]: I1204 17:28:01.323873 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec"} Dec 04 17:28:01 crc kubenswrapper[4631]: I1204 17:28:01.323926 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:01 crc kubenswrapper[4631]: I1204 17:28:01.325359 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:01 crc kubenswrapper[4631]: I1204 17:28:01.325416 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:01 crc kubenswrapper[4631]: I1204 17:28:01.325434 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:01 crc kubenswrapper[4631]: I1204 17:28:01.328400 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"f3b24dbe6d9b284dd49778fcde6a5d7b665e2fca8181a7cb11c2d14028da9fb4"} Dec 04 17:28:01 crc kubenswrapper[4631]: I1204 17:28:01.328466 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:01 crc kubenswrapper[4631]: I1204 17:28:01.328486 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:01 crc kubenswrapper[4631]: I1204 17:28:01.329539 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:01 crc kubenswrapper[4631]: I1204 17:28:01.329570 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:01 crc kubenswrapper[4631]: I1204 17:28:01.329581 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:01 crc kubenswrapper[4631]: I1204 17:28:01.329635 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:01 crc kubenswrapper[4631]: I1204 17:28:01.329661 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:01 crc kubenswrapper[4631]: I1204 17:28:01.329673 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:01 crc kubenswrapper[4631]: E1204 17:28:01.626498 4631 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 04 17:28:02 crc kubenswrapper[4631]: 
I1204 17:28:02.333772 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"0b8cfc70d64a4380de40ad3946bf5eddbaac430c8ee2e5dd4c217abed08c3e42"} Dec 04 17:28:02 crc kubenswrapper[4631]: I1204 17:28:02.333817 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b7a1a9026436075614a6cab59f54530fb2ef97810be45124532b9a0fd95f475f"} Dec 04 17:28:02 crc kubenswrapper[4631]: I1204 17:28:02.333828 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"896556077d3d66bedf254a876ed450916a16b124caa562611502420256c31b6b"} Dec 04 17:28:02 crc kubenswrapper[4631]: I1204 17:28:02.336981 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad"} Dec 04 17:28:02 crc kubenswrapper[4631]: I1204 17:28:02.337046 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:02 crc kubenswrapper[4631]: I1204 17:28:02.337066 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:02 crc kubenswrapper[4631]: I1204 17:28:02.337111 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:02 crc kubenswrapper[4631]: I1204 17:28:02.337281 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 17:28:02 crc kubenswrapper[4631]: I1204 17:28:02.337802 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:02 crc kubenswrapper[4631]: I1204 17:28:02.337829 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:02 crc kubenswrapper[4631]: I1204 17:28:02.337838 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:02 crc kubenswrapper[4631]: I1204 17:28:02.338404 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:02 crc kubenswrapper[4631]: I1204 17:28:02.338429 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:02 crc kubenswrapper[4631]: I1204 17:28:02.338437 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:02 crc kubenswrapper[4631]: I1204 17:28:02.338455 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:02 crc kubenswrapper[4631]: I1204 17:28:02.338479 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:02 crc kubenswrapper[4631]: I1204 17:28:02.338488 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:02 crc kubenswrapper[4631]: I1204 17:28:02.660721 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 17:28:02 crc kubenswrapper[4631]: I1204 17:28:02.766970 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 17:28:02 crc kubenswrapper[4631]: I1204 17:28:02.938628 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:28:03 crc kubenswrapper[4631]: I1204 17:28:03.346071 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a88d1f98e54af789fbfbe9da572e52d9ff61a0cf3f5886892bbc4938ec6380c2"} Dec 04 17:28:03 crc kubenswrapper[4631]: I1204 17:28:03.346273 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:03 crc kubenswrapper[4631]: I1204 17:28:03.347276 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:03 crc kubenswrapper[4631]: I1204 17:28:03.347604 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:03 crc kubenswrapper[4631]: I1204 17:28:03.347701 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:03 crc kubenswrapper[4631]: I1204 17:28:03.347721 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:03 crc kubenswrapper[4631]: I1204 17:28:03.348023 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:03 crc kubenswrapper[4631]: I1204 17:28:03.348666 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:03 crc kubenswrapper[4631]: I1204 17:28:03.348716 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:03 crc kubenswrapper[4631]: I1204 17:28:03.348739 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:03 crc kubenswrapper[4631]: I1204 17:28:03.349824 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:03 crc kubenswrapper[4631]: I1204 17:28:03.349868 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:03 crc kubenswrapper[4631]: I1204 17:28:03.349884 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:03 crc kubenswrapper[4631]: I1204 17:28:03.721842 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:28:04 crc kubenswrapper[4631]: I1204 17:28:04.338315 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:04 crc kubenswrapper[4631]: I1204 17:28:04.340044 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:04 crc kubenswrapper[4631]: I1204 17:28:04.340309 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:04 crc kubenswrapper[4631]: I1204 17:28:04.340568 4631 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:04 crc kubenswrapper[4631]: I1204 17:28:04.340785 4631 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 04 17:28:04 crc kubenswrapper[4631]: I1204 17:28:04.355141 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"19f71aeed47596904b2e04c95d3edfb7a59afe8fb79b50495360b7de6f0d03a7"} Dec 04 17:28:04 crc kubenswrapper[4631]: I1204 17:28:04.355171 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:04 crc kubenswrapper[4631]: I1204 17:28:04.355242 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:04 crc kubenswrapper[4631]: I1204 17:28:04.355328 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:04 crc kubenswrapper[4631]: I1204 17:28:04.357575 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:04 crc kubenswrapper[4631]: I1204 17:28:04.357604 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:04 crc kubenswrapper[4631]: I1204 17:28:04.357623 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:04 crc kubenswrapper[4631]: I1204 17:28:04.357649 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:04 crc kubenswrapper[4631]: I1204 17:28:04.357693 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:04 crc kubenswrapper[4631]: I1204 17:28:04.357710 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:04 crc kubenswrapper[4631]: I1204 17:28:04.357809 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:04 crc kubenswrapper[4631]: I1204 17:28:04.357826 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:04 crc kubenswrapper[4631]: I1204 17:28:04.357840 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:04 crc kubenswrapper[4631]: I1204 17:28:04.612728 4631 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 04 17:28:04 crc kubenswrapper[4631]: I1204 17:28:04.997116 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 04 17:28:05 crc kubenswrapper[4631]: I1204 17:28:05.012549 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 17:28:05 crc kubenswrapper[4631]: I1204 17:28:05.357315 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:05 crc kubenswrapper[4631]: I1204 17:28:05.357407 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:05 crc kubenswrapper[4631]: I1204 17:28:05.357331 4631 kubelet_node_status.go:401] "Setting node 
annotation to enable volume controller attach/detach" Dec 04 17:28:05 crc kubenswrapper[4631]: I1204 17:28:05.358807 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:05 crc kubenswrapper[4631]: I1204 17:28:05.358838 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:05 crc kubenswrapper[4631]: I1204 17:28:05.358848 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:05 crc kubenswrapper[4631]: I1204 17:28:05.358834 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:05 crc kubenswrapper[4631]: I1204 17:28:05.358838 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:05 crc kubenswrapper[4631]: I1204 17:28:05.358874 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:05 crc kubenswrapper[4631]: I1204 17:28:05.358887 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:05 crc kubenswrapper[4631]: I1204 17:28:05.358890 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:05 crc kubenswrapper[4631]: I1204 17:28:05.358931 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:06 crc kubenswrapper[4631]: I1204 17:28:06.359352 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:06 crc kubenswrapper[4631]: I1204 17:28:06.360616 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:06 crc kubenswrapper[4631]: I1204 17:28:06.360748 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:06 crc kubenswrapper[4631]: I1204 17:28:06.360831 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:06 crc kubenswrapper[4631]: I1204 17:28:06.392499 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:28:06 crc kubenswrapper[4631]: I1204 17:28:06.392828 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:06 crc kubenswrapper[4631]: I1204 17:28:06.394166 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:06 crc kubenswrapper[4631]: I1204 17:28:06.394209 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:06 crc kubenswrapper[4631]: I1204 17:28:06.394220 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:08 crc kubenswrapper[4631]: I1204 17:28:08.013158 4631 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting 
headers)" start-of-body= Dec 04 17:28:08 crc kubenswrapper[4631]: I1204 17:28:08.013260 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 04 17:28:08 crc kubenswrapper[4631]: I1204 17:28:08.016583 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 17:28:08 crc kubenswrapper[4631]: I1204 17:28:08.016982 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:08 crc kubenswrapper[4631]: I1204 17:28:08.018246 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:08 crc kubenswrapper[4631]: I1204 17:28:08.018307 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:08 crc kubenswrapper[4631]: I1204 17:28:08.018320 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:08 crc kubenswrapper[4631]: I1204 17:28:08.021067 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 17:28:08 crc kubenswrapper[4631]: I1204 17:28:08.364637 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:08 crc kubenswrapper[4631]: I1204 17:28:08.365953 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:08 crc kubenswrapper[4631]: I1204 17:28:08.366004 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:08 crc kubenswrapper[4631]: I1204 17:28:08.366017 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:08 crc kubenswrapper[4631]: I1204 17:28:08.372257 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 17:28:09 crc kubenswrapper[4631]: I1204 17:28:09.368214 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:09 crc kubenswrapper[4631]: I1204 17:28:09.370094 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:09 crc kubenswrapper[4631]: I1204 17:28:09.370182 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:09 crc kubenswrapper[4631]: I1204 17:28:09.370210 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:11 crc kubenswrapper[4631]: I1204 17:28:11.033907 4631 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path 
\"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 04 17:28:11 crc kubenswrapper[4631]: I1204 17:28:11.033979 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 04 17:28:11 crc kubenswrapper[4631]: I1204 17:28:11.401019 4631 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]log ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]etcd ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/openshift.io-api-request-count-filter ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/openshift.io-startkubeinformers ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/generic-apiserver-start-informers ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/priority-and-fairness-config-consumer ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/priority-and-fairness-filter ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/start-apiextensions-informers ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/start-apiextensions-controllers ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/crd-informer-synced ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/start-system-namespaces-controller ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/start-cluster-authentication-info-controller ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/start-legacy-token-tracking-controller ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/start-service-ip-repair-controllers ok Dec 04 17:28:11 crc kubenswrapper[4631]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld Dec 04 17:28:11 crc kubenswrapper[4631]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/priority-and-fairness-config-producer ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/bootstrap-controller ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/aggregator-reload-proxy-client-cert ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/start-kube-aggregator-informers ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/apiservice-status-local-available-controller ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/apiservice-status-remote-available-controller ok Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/apiservice-registration-controller ok Dec 04 17:28:11 crc 
Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/apiservice-wait-for-first-sync ok
Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/apiservice-discovery-controller ok
Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/kube-apiserver-autoregistration ok
Dec 04 17:28:11 crc kubenswrapper[4631]: [+]autoregister-completion ok
Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/apiservice-openapi-controller ok
Dec 04 17:28:11 crc kubenswrapper[4631]: [+]poststarthook/apiservice-openapiv3-controller ok
Dec 04 17:28:11 crc kubenswrapper[4631]: livez check failed
Dec 04 17:28:11 crc kubenswrapper[4631]: I1204 17:28:11.401091 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 04 17:28:11 crc kubenswrapper[4631]: E1204 17:28:11.626577 4631 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Dec 04 17:28:12 crc kubenswrapper[4631]: I1204 17:28:12.637857 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Dec 04 17:28:12 crc kubenswrapper[4631]: I1204 17:28:12.638153 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 04 17:28:12 crc kubenswrapper[4631]: I1204 17:28:12.639961 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:12 crc kubenswrapper[4631]: I1204 17:28:12.640033 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:12 crc kubenswrapper[4631]: I1204 17:28:12.640058 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:12 crc kubenswrapper[4631]: I1204 17:28:12.681229 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Dec 04 17:28:12 crc kubenswrapper[4631]: I1204 17:28:12.939510 4631 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 04 17:28:12 crc kubenswrapper[4631]: I1204 17:28:12.939601 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 04 17:28:13 crc kubenswrapper[4631]: I1204 17:28:13.378012 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 04 17:28:13 crc kubenswrapper[4631]: I1204 17:28:13.378810 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:13 crc kubenswrapper[4631]: I1204 17:28:13.378841 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:13 crc kubenswrapper[4631]: I1204 17:28:13.378851 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:13 crc kubenswrapper[4631]: I1204 17:28:13.397173 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Dec 04 17:28:14 crc kubenswrapper[4631]: I1204 17:28:14.379953 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 04 17:28:14 crc kubenswrapper[4631]: I1204 17:28:14.380785 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:14 crc kubenswrapper[4631]: I1204 17:28:14.380822 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:14 crc kubenswrapper[4631]: I1204 17:28:14.380833 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:14 crc kubenswrapper[4631]: I1204 17:28:14.601996 4631 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 04 17:28:14 crc kubenswrapper[4631]: I1204 17:28:14.602067 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.026952 4631 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="7s"
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.028210 4631 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.030217 4631 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.030672 4631 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.030791 4631 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.031152 4631 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.032136 4631 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.039548 4631 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.057308 4631 csr.go:261] certificate signing request csr-hv4f6 is approved, waiting to be issued
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.064247 4631 csr.go:257] certificate signing request csr-hv4f6 is issued
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.197063 4631 apiserver.go:52] "Watching apiserver"
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.234025 4631 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.234294 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h"]
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.234630 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.234771 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.234786 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.234807 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.234833 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.234940 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.235111 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.235142 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.235177 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.250471 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.253874 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.254954 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.255004 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.255125 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.255231 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.255633 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.257121 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.258194 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.290792 4631 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.337101 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.337656 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.337681 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.337703 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.337721 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.337740 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.337756 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.337775 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.337792 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.337809 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.337827 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.337844 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.337862 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.337882 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.337902 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.337923 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.337940 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.337960 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.337979 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338004 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338027 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338066 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338085 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338106 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338125 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338145 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338162 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338182 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338199 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338216 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338235 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: 
\"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338254 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338272 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338295 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338316 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338333 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338351 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338382 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338351 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338418 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338402 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338556 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338591 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338611 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338630 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338652 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338673 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338696 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338714 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338736 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338759 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338779 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338797 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338817 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338886 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338904 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338923 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338954 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338971 4631 
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338971 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.338992 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339011 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339056 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339073 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339095 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339119 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339139 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339181 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339203 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339224 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") "
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339243 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339262 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339282 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339303 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339322 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339343 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339453 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339483 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339506 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339516 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339533 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339557 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339577 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339600 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339622 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339645 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339668 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339762 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339790 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339813 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339836 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339861 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339897 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339920 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339945 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339999 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340024 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340051 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340082 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340112 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: 
\"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340139 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340171 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340194 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340214 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340233 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340258 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340289 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340318 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340347 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340426 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340455 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340476 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340498 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340519 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340537 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340560 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340583 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340616 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340640 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340664 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340683 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340701 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340719 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340736 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340755 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340782 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340800 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340818 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340835 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340851 4631 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340867 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340892 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340909 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340924 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340942 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340967 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340999 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.341022 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.341045 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.341081 
4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.341113 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.341142 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.341163 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.341186 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.341213 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.341236 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.341270 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.341300 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.341323 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.341347 4631 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.342749 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.342782 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.342805 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.342831 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.342864 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.342892 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.342919 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.342949 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.342972 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 04 17:28:16 crc kubenswrapper[4631]: 
I1204 17:28:16.342995 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.343019 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.343056 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.343092 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.344977 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345019 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345043 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345061 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345090 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345108 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: 
\"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345124 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345142 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345158 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345182 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345210 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345256 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345285 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345311 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345335 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345358 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod 
\"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345413 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345433 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345456 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345482 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345531 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345567 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345587 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345608 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345625 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345643 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345660 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345677 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345695 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345730 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345758 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345778 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345797 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345815 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345834 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345851 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345894 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345921 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345942 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345963 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345986 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.346013 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.346037 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.346061 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " 
pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.346087 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.346112 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.346130 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.346149 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.346170 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.346187 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.346238 4631 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.346253 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.346265 4631 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.346277 4631 reconciler_common.go:293] "Volume 
detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.346288 4631 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.347250 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339514 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339556 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.354871 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339728 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339744 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339750 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339936 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.339942 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340119 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340136 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340293 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340296 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340333 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340516 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340564 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340649 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340721 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340742 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340921 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.340947 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.341110 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.341226 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.341277 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.341848 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.341988 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.342043 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.342161 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.342273 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). 
InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.342332 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.342906 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.343158 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.343176 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.343209 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.343421 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.343491 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.343510 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.343700 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.343724 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.343955 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.344052 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.344200 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.344286 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.344308 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). 
InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.344576 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.344587 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.344598 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.344762 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.344774 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.344780 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.344947 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.344957 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345107 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.345262 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.346268 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.346435 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.346644 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.346655 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.347038 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.347324 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.347338 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.347700 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.347740 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.347787 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.347944 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.348136 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.348435 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.349012 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.349252 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.349306 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.349870 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.350098 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.350127 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.350315 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.350365 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.350599 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.350532 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.350751 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.350778 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.350857 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.354526 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.354674 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.355020 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.355061 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.355191 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.355196 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:28:16.855172616 +0000 UTC m=+26.887414624 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.355459 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.355598 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.355675 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.355678 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.355889 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.356084 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.356154 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.356164 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.356165 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.356334 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.356401 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). 
InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.356655 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.356681 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.356764 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.357007 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.357041 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.357192 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.357466 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.357533 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.357651 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.357911 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.357945 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.358118 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.358260 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.358625 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.358793 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.358971 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.359042 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.359221 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.359472 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.359485 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.361097 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.362077 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.363882 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.364125 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.364487 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.364670 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.365165 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.365792 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.366976 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.367491 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.367675 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.366267 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.367887 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.367992 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.368455 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.368160 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.368688 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.369019 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). 
InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.370305 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.370895 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.372573 4631 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.382002 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:16.881952604 +0000 UTC m=+26.914194612 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.382258 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.373745 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.373936 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.374105 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.382519 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.374494 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.374858 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.375345 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.380965 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.381524 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.386478 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.386649 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.387045 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.387501 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.387625 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.387638 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.390401 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.391521 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.391569 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.391729 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.391799 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.392055 4631 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.392172 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:16.892148007 +0000 UTC m=+26.924390005 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.396862 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.396876 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.397505 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.397535 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.399582 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.400786 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.401962 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.370895 4631 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.402760 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.403017 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.403298 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.403629 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.405341 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.405766 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.405765 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.405997 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.406049 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.406081 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.406362 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.406514 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.406654 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.406680 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.407296 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.407397 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.407527 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.407674 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.418163 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.419170 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.419202 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.419237 4631 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.419949 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:16.919925143 +0000 UTC m=+26.952167141 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.420081 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.420107 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.420119 4631 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.420153 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:16.92014317 +0000 UTC m=+26.952385168 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.436026 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.436976 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.437336 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.437985 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.438234 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.439907 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.440528 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-dzxft"] Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.440792 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-q27wh"] Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.440985 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.441222 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-dzxft" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.446991 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.447117 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.447462 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.447519 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.447769 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.447954 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448130 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448151 4631 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448163 4631 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448173 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448182 4631 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448191 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448201 4631 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448210 4631 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448218 4631 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448238 4631 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448248 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448257 4631 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448279 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448289 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448298 4631 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448306 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448315 4631 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448323 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448332 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448341 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448350 4631 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448359 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448379 4631 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448387 4631 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448399 4631 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448408 4631 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448639 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 17:28:16 crc 
kubenswrapper[4631]: I1204 17:28:16.448732 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448191 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448229 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448265 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448301 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.448389 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.450745 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.451021 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.458712 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.465028 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467428 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467494 4631 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467507 4631 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467518 4631 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467528 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467539 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467547 4631 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467555 4631 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467564 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467576 4631 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467585 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467594 4631 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467608 4631 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467619 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467633 4631 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467646 4631 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467655 4631 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467665 4631 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467673 4631 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467682 4631 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467691 4631 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467703 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467714 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467724 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: 
\"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467737 4631 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467747 4631 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467755 4631 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467767 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467776 4631 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467785 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467796 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467810 4631 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467822 4631 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467832 4631 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467844 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467853 4631 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467862 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node 
\"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467871 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467879 4631 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467889 4631 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467901 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467913 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467924 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467935 4631 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467946 4631 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467956 4631 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467967 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467977 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467988 4631 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.467999 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node 
\"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468013 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468024 4631 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468035 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468045 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468056 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468067 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468079 4631 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468090 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468101 4631 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468113 4631 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468124 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468136 4631 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468146 4631 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") 
on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468157 4631 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468168 4631 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468179 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468190 4631 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468204 4631 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468214 4631 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468226 4631 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468236 4631 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468247 4631 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468259 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468268 4631 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468276 4631 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468284 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: 
\"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468293 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468301 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468309 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468317 4631 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468325 4631 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468341 4631 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468355 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468366 4631 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468391 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468400 4631 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468408 4631 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468416 4631 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468426 4631 reconciler_common.go:293] "Volume detached for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468455 4631 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468463 4631 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468471 4631 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468479 4631 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468487 4631 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468495 4631 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468503 4631 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468512 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468519 4631 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468527 4631 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468535 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468543 4631 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468551 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" 
DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468559 4631 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468567 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468576 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468585 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468593 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468601 4631 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468609 4631 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468617 4631 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468625 4631 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468632 4631 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468640 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468648 4631 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468657 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: 
\"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468666 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468674 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468682 4631 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468692 4631 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468701 4631 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468709 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468717 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468727 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468736 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468744 4631 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468752 4631 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468760 4631 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468768 4631 reconciler_common.go:293] "Volume detached for volume 
\"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468777 4631 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468784 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468792 4631 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468800 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468808 4631 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468816 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468824 4631 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468833 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468841 4631 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468848 4631 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468856 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468865 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468872 4631 reconciler_common.go:293] "Volume detached for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468880 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468892 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468900 4631 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468908 4631 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468916 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468924 4631 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468932 4631 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468939 4631 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468949 4631 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468957 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468965 4631 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468972 4631 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468980 4631 reconciler_common.go:293] "Volume detached for 
volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.468993 4631 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.469001 4631 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.470474 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.479920 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.482386 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.483026 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.494878 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.495625 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.500648 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.505460 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.506929 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.507291 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.536501 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.548395 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.553130 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.564557 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.571760 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68wxj\" (UniqueName: \"kubernetes.io/projected/35aedb25-9161-4c4e-a563-097dd7c4bc7e-kube-api-access-68wxj\") pod \"node-resolver-dzxft\" (UID: \"35aedb25-9161-4c4e-a563-097dd7c4bc7e\") " pod="openshift-dns/node-resolver-dzxft" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.571794 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/35aedb25-9161-4c4e-a563-097dd7c4bc7e-hosts-file\") pod \"node-resolver-dzxft\" (UID: \"35aedb25-9161-4c4e-a563-097dd7c4bc7e\") " pod="openshift-dns/node-resolver-dzxft" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.571813 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fc938ac1-b2a3-4435-bda5-c7be66763a01-proxy-tls\") pod \"machine-config-daemon-q27wh\" (UID: \"fc938ac1-b2a3-4435-bda5-c7be66763a01\") " pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.571830 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fc938ac1-b2a3-4435-bda5-c7be66763a01-mcd-auth-proxy-config\") pod \"machine-config-daemon-q27wh\" (UID: \"fc938ac1-b2a3-4435-bda5-c7be66763a01\") " pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.571855 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6788\" (UniqueName: \"kubernetes.io/projected/fc938ac1-b2a3-4435-bda5-c7be66763a01-kube-api-access-d6788\") pod \"machine-config-daemon-q27wh\" (UID: \"fc938ac1-b2a3-4435-bda5-c7be66763a01\") " pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.571870 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/fc938ac1-b2a3-4435-bda5-c7be66763a01-rootfs\") pod 
\"machine-config-daemon-q27wh\" (UID: \"fc938ac1-b2a3-4435-bda5-c7be66763a01\") " pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.571925 4631 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.571935 4631 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.571944 4631 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.571952 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.571961 4631 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.571971 4631 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.571982 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.572149 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.576310 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.578869 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.583099 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.598878 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.617767 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.640103 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.656572 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.676023 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.676175 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.677747 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68wxj\" (UniqueName: \"kubernetes.io/projected/35aedb25-9161-4c4e-a563-097dd7c4bc7e-kube-api-access-68wxj\") pod \"node-resolver-dzxft\" (UID: \"35aedb25-9161-4c4e-a563-097dd7c4bc7e\") " pod="openshift-dns/node-resolver-dzxft" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.677787 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/35aedb25-9161-4c4e-a563-097dd7c4bc7e-hosts-file\") pod \"node-resolver-dzxft\" (UID: \"35aedb25-9161-4c4e-a563-097dd7c4bc7e\") " pod="openshift-dns/node-resolver-dzxft" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.677807 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fc938ac1-b2a3-4435-bda5-c7be66763a01-proxy-tls\") pod \"machine-config-daemon-q27wh\" (UID: \"fc938ac1-b2a3-4435-bda5-c7be66763a01\") " pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.677823 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fc938ac1-b2a3-4435-bda5-c7be66763a01-mcd-auth-proxy-config\") pod \"machine-config-daemon-q27wh\" (UID: \"fc938ac1-b2a3-4435-bda5-c7be66763a01\") " pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.677848 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6788\" (UniqueName: \"kubernetes.io/projected/fc938ac1-b2a3-4435-bda5-c7be66763a01-kube-api-access-d6788\") pod \"machine-config-daemon-q27wh\" (UID: \"fc938ac1-b2a3-4435-bda5-c7be66763a01\") " pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.677868 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/fc938ac1-b2a3-4435-bda5-c7be66763a01-rootfs\") pod \"machine-config-daemon-q27wh\" (UID: \"fc938ac1-b2a3-4435-bda5-c7be66763a01\") " pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.677935 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/fc938ac1-b2a3-4435-bda5-c7be66763a01-rootfs\") pod \"machine-config-daemon-q27wh\" (UID: \"fc938ac1-b2a3-4435-bda5-c7be66763a01\") 
" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.678192 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/35aedb25-9161-4c4e-a563-097dd7c4bc7e-hosts-file\") pod \"node-resolver-dzxft\" (UID: \"35aedb25-9161-4c4e-a563-097dd7c4bc7e\") " pod="openshift-dns/node-resolver-dzxft" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.680626 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fc938ac1-b2a3-4435-bda5-c7be66763a01-mcd-auth-proxy-config\") pod \"machine-config-daemon-q27wh\" (UID: \"fc938ac1-b2a3-4435-bda5-c7be66763a01\") " pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.685524 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fc938ac1-b2a3-4435-bda5-c7be66763a01-proxy-tls\") pod \"machine-config-daemon-q27wh\" (UID: \"fc938ac1-b2a3-4435-bda5-c7be66763a01\") " pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.690933 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.699667 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6788\" (UniqueName: \"kubernetes.io/projected/fc938ac1-b2a3-4435-bda5-c7be66763a01-kube-api-access-d6788\") pod \"machine-config-daemon-q27wh\" (UID: \"fc938ac1-b2a3-4435-bda5-c7be66763a01\") " pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.699679 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68wxj\" (UniqueName: \"kubernetes.io/projected/35aedb25-9161-4c4e-a563-097dd7c4bc7e-kube-api-access-68wxj\") pod \"node-resolver-dzxft\" (UID: \"35aedb25-9161-4c4e-a563-097dd7c4bc7e\") " pod="openshift-dns/node-resolver-dzxft" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.703333 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.712102 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.721149 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.731395 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-po
d-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.740795 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.756205 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.765943 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.775931 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.789226 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.790284 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.795425 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-zllp2"] Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.795719 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.796750 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-bfhhc"] Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.797442 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.798160 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.798202 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.799917 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.800014 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-vpgzg"] Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.800111 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.800184 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.800309 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.800454 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.800894 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.805051 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-dzxft" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.805885 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.806035 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.806086 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.806241 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.806326 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.806433 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.806559 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.806706 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 04 17:28:16 crc kubenswrapper[4631]: W1204 17:28:16.808621 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfc938ac1_b2a3_4435_bda5_c7be66763a01.slice/crio-99469ee1543e3882630fc3b57969be78fae601f8ea51a60622edea1ba01c1b5c WatchSource:0}: Error finding container 99469ee1543e3882630fc3b57969be78fae601f8ea51a60622edea1ba01c1b5c: Status 404 returned error can't find the container with id 99469ee1543e3882630fc3b57969be78fae601f8ea51a60622edea1ba01c1b5c Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.825580 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.837224 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.847697 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.858563 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.868567 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880116 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880211 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-multus-socket-dir-parent\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880232 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/de540d5a-4ce4-4960-b721-75c6d9a7a02e-cni-binary-copy\") pod \"multus-additional-cni-plugins-bfhhc\" (UID: \"de540d5a-4ce4-4960-b721-75c6d9a7a02e\") " pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880248 4631 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-multus-cni-dir\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880263 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-host-run-k8s-cni-cncf-io\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880276 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/de540d5a-4ce4-4960-b721-75c6d9a7a02e-cnibin\") pod \"multus-additional-cni-plugins-bfhhc\" (UID: \"de540d5a-4ce4-4960-b721-75c6d9a7a02e\") " pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880290 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/de540d5a-4ce4-4960-b721-75c6d9a7a02e-os-release\") pod \"multus-additional-cni-plugins-bfhhc\" (UID: \"de540d5a-4ce4-4960-b721-75c6d9a7a02e\") " pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880306 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880321 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-system-cni-dir\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880334 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0d617abc-dc04-4807-b684-3640cde38e81-env-overrides\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880417 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtfmf\" (UniqueName: \"kubernetes.io/projected/0d617abc-dc04-4807-b684-3640cde38e81-kube-api-access-vtfmf\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880454 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-host-run-netns\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " 
pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880468 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-host-var-lib-cni-bin\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880484 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/03e821a0-13d4-417c-9e54-7073b08490db-multus-daemon-config\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880501 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/de540d5a-4ce4-4960-b721-75c6d9a7a02e-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-bfhhc\" (UID: \"de540d5a-4ce4-4960-b721-75c6d9a7a02e\") " pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880515 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-os-release\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880528 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-hostroot\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880546 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-cni-netd\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880571 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-run-systemd\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880587 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-run-netns\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880633 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-var-lib-openvswitch\") pod \"ovnkube-node-vpgzg\" (UID: 
\"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880658 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-host-var-lib-kubelet\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880672 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-run-openvswitch\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880688 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-cni-bin\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880704 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/de540d5a-4ce4-4960-b721-75c6d9a7a02e-tuning-conf-dir\") pod \"multus-additional-cni-plugins-bfhhc\" (UID: \"de540d5a-4ce4-4960-b721-75c6d9a7a02e\") " pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880727 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-cnibin\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880743 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-host-run-multus-certs\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880757 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-systemd-units\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880771 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-node-log\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880794 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/03e821a0-13d4-417c-9e54-7073b08490db-cni-binary-copy\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880808 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-etc-kubernetes\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880822 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/de540d5a-4ce4-4960-b721-75c6d9a7a02e-system-cni-dir\") pod \"multus-additional-cni-plugins-bfhhc\" (UID: \"de540d5a-4ce4-4960-b721-75c6d9a7a02e\") " pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880845 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xcwj\" (UniqueName: \"kubernetes.io/projected/de540d5a-4ce4-4960-b721-75c6d9a7a02e-kube-api-access-2xcwj\") pod \"multus-additional-cni-plugins-bfhhc\" (UID: \"de540d5a-4ce4-4960-b721-75c6d9a7a02e\") " pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880858 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-slash\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880871 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-log-socket\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880885 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0d617abc-dc04-4807-b684-3640cde38e81-ovn-node-metrics-cert\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880906 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-kubelet\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880921 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-run-ovn\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880937 4631 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-run-ovn-kubernetes\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880950 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0d617abc-dc04-4807-b684-3640cde38e81-ovnkube-script-lib\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880969 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-host-var-lib-cni-multus\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.880993 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-multus-conf-dir\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.881010 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5rnb\" (UniqueName: \"kubernetes.io/projected/03e821a0-13d4-417c-9e54-7073b08490db-kube-api-access-d5rnb\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.881027 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-etc-openvswitch\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.881044 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0d617abc-dc04-4807-b684-3640cde38e81-ovnkube-config\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.881140 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:28:17.881124354 +0000 UTC m=+27.913366352 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.884970 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.905475 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.929954 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.938525 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.973667 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd 
nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.982690 4631 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-system-cni-dir\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.982726 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0d617abc-dc04-4807-b684-3640cde38e81-env-overrides\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.982744 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.982762 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-host-run-netns\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.982775 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-host-var-lib-cni-bin\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.982789 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/03e821a0-13d4-417c-9e54-7073b08490db-multus-daemon-config\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.982803 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/de540d5a-4ce4-4960-b721-75c6d9a7a02e-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-bfhhc\" (UID: \"de540d5a-4ce4-4960-b721-75c6d9a7a02e\") " pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.982818 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtfmf\" (UniqueName: \"kubernetes.io/projected/0d617abc-dc04-4807-b684-3640cde38e81-kube-api-access-vtfmf\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.982834 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-hostroot\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.982848 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: 
\"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-cni-netd\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.982864 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-os-release\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.982879 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-run-systemd\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.982893 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-run-netns\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.982906 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-var-lib-openvswitch\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.982922 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-host-var-lib-kubelet\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.982936 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-run-openvswitch\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.982949 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-cni-bin\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.982967 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.982980 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-cnibin\") pod \"multus-zllp2\" (UID: 
\"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.982994 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-host-run-multus-certs\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983007 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/de540d5a-4ce4-4960-b721-75c6d9a7a02e-tuning-conf-dir\") pod \"multus-additional-cni-plugins-bfhhc\" (UID: \"de540d5a-4ce4-4960-b721-75c6d9a7a02e\") " pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983021 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-node-log\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983035 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-systemd-units\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983051 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983067 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-etc-kubernetes\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983083 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/de540d5a-4ce4-4960-b721-75c6d9a7a02e-system-cni-dir\") pod \"multus-additional-cni-plugins-bfhhc\" (UID: \"de540d5a-4ce4-4960-b721-75c6d9a7a02e\") " pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983098 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xcwj\" (UniqueName: \"kubernetes.io/projected/de540d5a-4ce4-4960-b721-75c6d9a7a02e-kube-api-access-2xcwj\") pod \"multus-additional-cni-plugins-bfhhc\" (UID: \"de540d5a-4ce4-4960-b721-75c6d9a7a02e\") " pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983112 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-slash\") pod 
\"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983125 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-log-socket\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983139 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/03e821a0-13d4-417c-9e54-7073b08490db-cni-binary-copy\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983157 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983172 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-kubelet\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983187 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-run-ovn\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983205 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-run-ovn-kubernetes\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983219 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0d617abc-dc04-4807-b684-3640cde38e81-ovn-node-metrics-cert\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983233 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0d617abc-dc04-4807-b684-3640cde38e81-ovnkube-script-lib\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983248 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-host-var-lib-cni-multus\") pod \"multus-zllp2\" (UID: 
\"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983262 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-multus-conf-dir\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983276 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5rnb\" (UniqueName: \"kubernetes.io/projected/03e821a0-13d4-417c-9e54-7073b08490db-kube-api-access-d5rnb\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983290 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-etc-openvswitch\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983304 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0d617abc-dc04-4807-b684-3640cde38e81-ovnkube-config\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983322 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-multus-socket-dir-parent\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983338 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/de540d5a-4ce4-4960-b721-75c6d9a7a02e-cni-binary-copy\") pod \"multus-additional-cni-plugins-bfhhc\" (UID: \"de540d5a-4ce4-4960-b721-75c6d9a7a02e\") " pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983354 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-multus-cni-dir\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983383 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-host-run-k8s-cni-cncf-io\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983397 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/de540d5a-4ce4-4960-b721-75c6d9a7a02e-cnibin\") pod \"multus-additional-cni-plugins-bfhhc\" (UID: \"de540d5a-4ce4-4960-b721-75c6d9a7a02e\") " pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:16 crc 
kubenswrapper[4631]: I1204 17:28:16.983412 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/de540d5a-4ce4-4960-b721-75c6d9a7a02e-os-release\") pod \"multus-additional-cni-plugins-bfhhc\" (UID: \"de540d5a-4ce4-4960-b721-75c6d9a7a02e\") " pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983427 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983483 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.983533 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-system-cni-dir\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.984075 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0d617abc-dc04-4807-b684-3640cde38e81-env-overrides\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.984115 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-etc-kubernetes\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.984182 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.984197 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.984207 4631 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.984239 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-12-04 17:28:17.984228371 +0000 UTC m=+28.016470369 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.984266 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-host-run-netns\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.984288 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-host-var-lib-cni-bin\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.984806 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/03e821a0-13d4-417c-9e54-7073b08490db-multus-daemon-config\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.984948 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/de540d5a-4ce4-4960-b721-75c6d9a7a02e-system-cni-dir\") pod \"multus-additional-cni-plugins-bfhhc\" (UID: \"de540d5a-4ce4-4960-b721-75c6d9a7a02e\") " pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.985234 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-slash\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.985263 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-log-socket\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.985272 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/de540d5a-4ce4-4960-b721-75c6d9a7a02e-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-bfhhc\" (UID: \"de540d5a-4ce4-4960-b721-75c6d9a7a02e\") " pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.985501 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-hostroot\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: 
I1204 17:28:16.985531 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-cni-netd\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.985573 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-os-release\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.985595 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-run-systemd\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.985616 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-run-netns\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.985638 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-var-lib-openvswitch\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.985660 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-host-var-lib-kubelet\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.985681 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-run-openvswitch\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.985705 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-cni-bin\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.985735 4631 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.985759 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:17.985751695 +0000 UTC m=+28.017993693 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.985789 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-cnibin\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.985814 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-host-run-multus-certs\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.985860 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/03e821a0-13d4-417c-9e54-7073b08490db-cni-binary-copy\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.985939 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.987788 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.987809 4631 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.987850 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:17.987832365 +0000 UTC m=+28.020074413 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.986249 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-kubelet\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.986267 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-run-ovn\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.986284 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-run-ovn-kubernetes\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.986328 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-host-var-lib-cni-multus\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.986848 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0d617abc-dc04-4807-b684-3640cde38e81-ovnkube-script-lib\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.987256 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/de540d5a-4ce4-4960-b721-75c6d9a7a02e-cni-binary-copy\") pod \"multus-additional-cni-plugins-bfhhc\" (UID: \"de540d5a-4ce4-4960-b721-75c6d9a7a02e\") " pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.987280 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-etc-openvswitch\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.987417 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-multus-conf-dir\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.987441 4631 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-host-run-k8s-cni-cncf-io\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.987642 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-multus-cni-dir\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.987669 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/03e821a0-13d4-417c-9e54-7073b08490db-multus-socket-dir-parent\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.987672 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/de540d5a-4ce4-4960-b721-75c6d9a7a02e-cnibin\") pod \"multus-additional-cni-plugins-bfhhc\" (UID: \"de540d5a-4ce4-4960-b721-75c6d9a7a02e\") " pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.987698 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/de540d5a-4ce4-4960-b721-75c6d9a7a02e-os-release\") pod \"multus-additional-cni-plugins-bfhhc\" (UID: \"de540d5a-4ce4-4960-b721-75c6d9a7a02e\") " pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.987765 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0d617abc-dc04-4807-b684-3640cde38e81-ovnkube-config\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.985991 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-node-log\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.986091 4631 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 17:28:16 crc kubenswrapper[4631]: E1204 17:28:16.988043 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:17.98803287 +0000 UTC m=+28.020274928 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.986012 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-systemd-units\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.986086 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/de540d5a-4ce4-4960-b721-75c6d9a7a02e-tuning-conf-dir\") pod \"multus-additional-cni-plugins-bfhhc\" (UID: \"de540d5a-4ce4-4960-b721-75c6d9a7a02e\") " pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.993585 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:16 crc kubenswrapper[4631]: I1204 17:28:16.993927 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0d617abc-dc04-4807-b684-3640cde38e81-ovn-node-metrics-cert\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.013820 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5rnb\" (UniqueName: \"kubernetes.io/projected/03e821a0-13d4-417c-9e54-7073b08490db-kube-api-access-d5rnb\") pod \"multus-zllp2\" (UID: \"03e821a0-13d4-417c-9e54-7073b08490db\") " pod="openshift-multus/multus-zllp2" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.023125 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.024100 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xcwj\" (UniqueName: \"kubernetes.io/projected/de540d5a-4ce4-4960-b721-75c6d9a7a02e-kube-api-access-2xcwj\") pod \"multus-additional-cni-plugins-bfhhc\" (UID: \"de540d5a-4ce4-4960-b721-75c6d9a7a02e\") " pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.024806 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtfmf\" (UniqueName: \"kubernetes.io/projected/0d617abc-dc04-4807-b684-3640cde38e81-kube-api-access-vtfmf\") pod \"ovnkube-node-vpgzg\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.047409 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.064616 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.066259 4631 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-04 17:23:16 +0000 UTC, rotation deadline is 2026-10-02 15:25:28.700827118 +0000 UTC Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.066299 4631 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7245h57m11.634529833s for next certificate rotation Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.083011 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.100446 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.114165 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.122427 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-zllp2" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.125522 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.131188 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" Dec 04 17:28:17 crc kubenswrapper[4631]: W1204 17:28:17.137528 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod03e821a0_13d4_417c_9e54_7073b08490db.slice/crio-14694539e3a68c181b1421525e5f20ab796d04904df3faf0d07052aa4002aa51 WatchSource:0}: Error finding container 14694539e3a68c181b1421525e5f20ab796d04904df3faf0d07052aa4002aa51: Status 404 returned error can't find the container with id 14694539e3a68c181b1421525e5f20ab796d04904df3faf0d07052aa4002aa51 Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.141601 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:17 crc kubenswrapper[4631]: W1204 17:28:17.154474 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0d617abc_dc04_4807_b684_3640cde38e81.slice/crio-b107e2d89e7d0e33e6769dccef66a592afa22d2dad94b499d72acbfa8a438a4f WatchSource:0}: Error finding container b107e2d89e7d0e33e6769dccef66a592afa22d2dad94b499d72acbfa8a438a4f: Status 404 returned error can't find the container with id b107e2d89e7d0e33e6769dccef66a592afa22d2dad94b499d72acbfa8a438a4f Dec 04 17:28:17 crc kubenswrapper[4631]: W1204 17:28:17.156830 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podde540d5a_4ce4_4960_b721_75c6d9a7a02e.slice/crio-fc767c699b079b509180754979baa83982017581e6967bb3a59775c018e644a4 WatchSource:0}: Error finding container fc767c699b079b509180754979baa83982017581e6967bb3a59775c018e644a4: Status 404 returned error can't find the container with id fc767c699b079b509180754979baa83982017581e6967bb3a59775c018e644a4 Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.159014 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.177997 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cer
t-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.211101 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.249147 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.268528 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.283444 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.293184 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.306799 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.316174 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.324850 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.333800 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.354915 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd 
nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"moun
tPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\
\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.428592 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2"} Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.428641 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86"} Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.428654 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"7fc7f5adf30a81f919850729a0ba88267d3b477f11896c2a488d01dffbcf9c9f"} Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.430380 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" event={"ID":"de540d5a-4ce4-4960-b721-75c6d9a7a02e","Type":"ContainerStarted","Data":"fc767c699b079b509180754979baa83982017581e6967bb3a59775c018e644a4"} Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.431138 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerStarted","Data":"b107e2d89e7d0e33e6769dccef66a592afa22d2dad94b499d72acbfa8a438a4f"} Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.432041 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zllp2" event={"ID":"03e821a0-13d4-417c-9e54-7073b08490db","Type":"ContainerStarted","Data":"14694539e3a68c181b1421525e5f20ab796d04904df3faf0d07052aa4002aa51"} Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.433289 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerStarted","Data":"8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd"} Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.433314 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerStarted","Data":"99469ee1543e3882630fc3b57969be78fae601f8ea51a60622edea1ba01c1b5c"} Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.434337 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-dzxft" event={"ID":"35aedb25-9161-4c4e-a563-097dd7c4bc7e","Type":"ContainerStarted","Data":"9ed6e4bc1503930cba626ba572776cd5c685f4cb6734ca960ed61d67ed47d1ff"} Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.438985 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3"} Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.439011 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"3d4ba793ca1322f0811416b5a3c8d7cf22e0d6a95fee2c7c69122ef9fd6d3b3b"} Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 
17:28:17.444407 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"5afe51365613aa7df6d1d6c0805c7e60ecfee4466e501bb1bb8e5b6be8ddd4d8"} Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.446583 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started
\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.456041 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.476455 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.493463 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.512267 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.549565 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd 
nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"moun
tPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\
\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.558133 4631 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.571936 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.670490 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:17Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.687761 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:17Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.705230 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:17Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.722357 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:17Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.765445 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:17Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.789076 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:17Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.813133 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:17Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.827852 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:17Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.864677 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:17Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.892605 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:28:17 crc kubenswrapper[4631]: E1204 17:28:17.892738 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:28:19.892719473 +0000 UTC m=+29.924961471 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.904902 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:17Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.947026 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:17Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.985766 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:17Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.994179 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.994244 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.994276 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:17 crc kubenswrapper[4631]: I1204 17:28:17.994301 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:17 crc kubenswrapper[4631]: E1204 17:28:17.994407 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 17:28:17 crc kubenswrapper[4631]: E1204 17:28:17.994442 4631 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 17:28:17 crc kubenswrapper[4631]: E1204 17:28:17.994446 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 17:28:17 crc kubenswrapper[4631]: E1204 17:28:17.994448 4631 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 17:28:17 crc kubenswrapper[4631]: E1204 17:28:17.994500 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:19.994487512 +0000 UTC m=+30.026729510 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 17:28:17 crc kubenswrapper[4631]: E1204 17:28:17.994553 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:19.994534423 +0000 UTC m=+30.026776421 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 17:28:17 crc kubenswrapper[4631]: E1204 17:28:17.994468 4631 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:17 crc kubenswrapper[4631]: E1204 17:28:17.994658 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 17:28:17 crc kubenswrapper[4631]: E1204 17:28:17.994672 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 17:28:17 crc kubenswrapper[4631]: E1204 17:28:17.994684 4631 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:17 crc kubenswrapper[4631]: E1204 17:28:17.994730 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:19.994693308 +0000 UTC m=+30.026935316 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:17 crc kubenswrapper[4631]: E1204 17:28:17.994761 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:19.994749039 +0000 UTC m=+30.026991047 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.037635 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e63
55e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.068308 4631 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.112482 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.151050 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.193064 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed 
to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.236484 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers 
with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name
\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.238588 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.238627 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.238640 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 17:28:18 crc kubenswrapper[4631]: E1204 17:28:18.238717 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 04 17:28:18 crc kubenswrapper[4631]: E1204 17:28:18.238805 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 04 17:28:18 crc kubenswrapper[4631]: E1204 17:28:18.238870 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.243691 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.244716 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.245604 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.246361 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.247111 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.247739 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.248583 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.249291 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.253702 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.254475 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.255797 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.256769 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.258006 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.258686 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.259918 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.260604 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.262156 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.262860 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.263595 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.264951 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.265767 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.266475 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.267635 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.268483 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.269028 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.269678 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.270309 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.270791 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.272541 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.273273 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.274114 4631 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.274215 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.275894 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.276970 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.277463 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.279985 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.281183 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.281793 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.283079 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.283869 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.284357 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.285542 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.286565 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.287237 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.288142 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.288782 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.289842 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.290653 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.292035 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.292726 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.293264 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.294312 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.295137 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.296190 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes"
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.448398 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-dzxft" event={"ID":"35aedb25-9161-4c4e-a563-097dd7c4bc7e","Type":"ContainerStarted","Data":"1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79"}
Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.450312 4631 generic.go:334] "Generic (PLEG): container finished" podID="de540d5a-4ce4-4960-b721-75c6d9a7a02e"
containerID="5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0" exitCode=0 Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.450395 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" event={"ID":"de540d5a-4ce4-4960-b721-75c6d9a7a02e","Type":"ContainerDied","Data":"5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0"} Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.451582 4631 generic.go:334] "Generic (PLEG): container finished" podID="0d617abc-dc04-4807-b684-3640cde38e81" containerID="00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2" exitCode=0 Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.451629 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerDied","Data":"00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2"} Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.457809 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zllp2" event={"ID":"03e821a0-13d4-417c-9e54-7073b08490db","Type":"ContainerStarted","Data":"690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa"} Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.461506 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerStarted","Data":"50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc"} Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.499081 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.524313 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.549677 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.564529 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.578125 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.591420 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.606333 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.625081 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.640288 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servicea
ccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.658315 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366
f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.675045 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 
2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.711116 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.748801 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.785599 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.834968 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.866694 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.907236 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.947081 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:18 crc kubenswrapper[4631]: I1204 17:28:18.984613 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:18Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.027190 4631 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:19Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.064750 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:19Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.106498 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:19Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.149584 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:19Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:19 crc 
kubenswrapper[4631]: I1204 17:28:19.186411 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\
\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:19Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.224818 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:19Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.265310 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:19Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.466765 4631 generic.go:334] "Generic (PLEG): container finished" podID="de540d5a-4ce4-4960-b721-75c6d9a7a02e" containerID="fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca" exitCode=0 Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.466852 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" event={"ID":"de540d5a-4ce4-4960-b721-75c6d9a7a02e","Type":"ContainerDied","Data":"fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca"} Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.475638 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerStarted","Data":"eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326"} Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.475767 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerStarted","Data":"ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23"} Dec 04 
17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.475860 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerStarted","Data":"22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75"} Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.481988 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:19Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.496178 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"moun
tPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:19Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.509853 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:19Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.521929 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:19Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.533442 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:19Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.551420 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:19Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.564073 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:19Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.584209 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:19Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.629898 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:19Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.666825 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:19Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.704271 4631 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:19Z is after 2025-08-24T17:21:41Z" Dec 04 
17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.746499 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:19Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.792753 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b8279948
8ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:19Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:19 crc kubenswrapper[4631]: I1204 17:28:19.913434 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:28:19 crc kubenswrapper[4631]: E1204 17:28:19.913629 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:28:23.913609593 +0000 UTC m=+33.945851591 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.015030 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.015082 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.015110 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.015137 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:28:20 crc kubenswrapper[4631]: E1204 17:28:20.015159 4631 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 17:28:20 crc kubenswrapper[4631]: E1204 17:28:20.015223 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:24.015207918 +0000 UTC m=+34.047449916 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 17:28:20 crc kubenswrapper[4631]: E1204 17:28:20.015234 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 17:28:20 crc kubenswrapper[4631]: E1204 17:28:20.015254 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 17:28:20 crc kubenswrapper[4631]: E1204 17:28:20.015266 4631 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:20 crc kubenswrapper[4631]: E1204 17:28:20.015297 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:24.01528671 +0000 UTC m=+34.047528708 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:20 crc kubenswrapper[4631]: E1204 17:28:20.015299 4631 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 17:28:20 crc kubenswrapper[4631]: E1204 17:28:20.015334 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 17:28:20 crc kubenswrapper[4631]: E1204 17:28:20.015388 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 17:28:20 crc kubenswrapper[4631]: E1204 17:28:20.015405 4631 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:20 crc kubenswrapper[4631]: E1204 17:28:20.015416 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:24.015397223 +0000 UTC m=+34.047639221 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 17:28:20 crc kubenswrapper[4631]: E1204 17:28:20.015473 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:24.015452675 +0000 UTC m=+34.047694683 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.137530 4631 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.238805 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:28:20 crc kubenswrapper[4631]: E1204 17:28:20.238955 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.239351 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:20 crc kubenswrapper[4631]: E1204 17:28:20.239466 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.239600 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:28:20 crc kubenswrapper[4631]: E1204 17:28:20.239703 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.253113 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/st
atic-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.265284 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.276320 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.286838 4631 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z" Dec 04 
17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.297326 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.308598 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c9
87117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 
17:28:20.320392 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.330533 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.344952 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/r
un/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z"
Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.356180 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z"
Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.373628 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z"
Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.386597 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z"
Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.398582 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z"
Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.479981 4631 generic.go:334] "Generic (PLEG): container finished" podID="de540d5a-4ce4-4960-b721-75c6d9a7a02e" containerID="21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16" exitCode=0
Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.480035 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" event={"ID":"de540d5a-4ce4-4960-b721-75c6d9a7a02e","Type":"ContainerDied","Data":"21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16"}
Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.489750 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerStarted","Data":"05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442"}
Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.489804 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerStarted","Data":"48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a"}
Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.489815 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerStarted","Data":"9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053"}
Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.493413 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f"}
Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.494294 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z"
Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.507061 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.520537 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.536148 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",
\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.554716 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z 
is after 2025-08-24T17:21:41Z"
Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.564430 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z"
Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.585583 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.625825 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.667407 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.704293 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.746963 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.784849 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\
\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.827203 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs
\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.865460 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.905303 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.952540 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:20 crc kubenswrapper[4631]: I1204 17:28:20.983113 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.026960 4631 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.064326 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.106306 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.148014 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.186259 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\
\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.230276 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.268590 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.305444 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.360847 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.498634 4631 generic.go:334] "Generic (PLEG): container finished" podID="de540d5a-4ce4-4960-b721-75c6d9a7a02e" containerID="209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9" exitCode=0 Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.498727 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" event={"ID":"de540d5a-4ce4-4960-b721-75c6d9a7a02e","Type":"ContainerDied","Data":"209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9"} Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.509419 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.529970 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"re
startCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-
api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c
4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.545732 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.557357 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.567810 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.585146 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.624448 4631 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 
17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.664751 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.704139 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b8279948
8ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.744020 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.771708 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-6ns2w"] Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.772081 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-6ns2w" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.786834 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.i
o/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"
finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.798901 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.815834 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.836309 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.856524 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 
17:28:21.959278 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3859fd47-f59f-4401-b195-a8a9e227c472-host\") pod \"node-ca-6ns2w\" (UID: \"3859fd47-f59f-4401-b195-a8a9e227c472\") " pod="openshift-image-registry/node-ca-6ns2w" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.959334 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhnkm\" (UniqueName: \"kubernetes.io/projected/3859fd47-f59f-4401-b195-a8a9e227c472-kube-api-access-hhnkm\") pod \"node-ca-6ns2w\" (UID: \"3859fd47-f59f-4401-b195-a8a9e227c472\") " pod="openshift-image-registry/node-ca-6ns2w" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.959410 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3859fd47-f59f-4401-b195-a8a9e227c472-serviceca\") pod \"node-ca-6ns2w\" (UID: \"3859fd47-f59f-4401-b195-a8a9e227c472\") " pod="openshift-image-registry/node-ca-6ns2w" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.963180 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver
-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.973863 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:21 crc kubenswrapper[4631]: I1204 17:28:21.984880 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:21Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.029039 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\
\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":f
alse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.060147 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3859fd47-f59f-4401-b195-a8a9e227c472-serviceca\") pod \"node-ca-6ns2w\" (UID: \"3859fd47-f59f-4401-b195-a8a9e227c472\") " pod="openshift-image-registry/node-ca-6ns2w" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.060511 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3859fd47-f59f-4401-b195-a8a9e227c472-host\") pod \"node-ca-6ns2w\" (UID: \"3859fd47-f59f-4401-b195-a8a9e227c472\") " pod="openshift-image-registry/node-ca-6ns2w" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.060644 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3859fd47-f59f-4401-b195-a8a9e227c472-host\") pod \"node-ca-6ns2w\" (UID: \"3859fd47-f59f-4401-b195-a8a9e227c472\") " pod="openshift-image-registry/node-ca-6ns2w" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.060665 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhnkm\" (UniqueName: \"kubernetes.io/projected/3859fd47-f59f-4401-b195-a8a9e227c472-kube-api-access-hhnkm\") pod \"node-ca-6ns2w\" (UID: \"3859fd47-f59f-4401-b195-a8a9e227c472\") " pod="openshift-image-registry/node-ca-6ns2w" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.061458 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3859fd47-f59f-4401-b195-a8a9e227c472-serviceca\") pod \"node-ca-6ns2w\" (UID: 
\"3859fd47-f59f-4401-b195-a8a9e227c472\") " pod="openshift-image-registry/node-ca-6ns2w" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.063356 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.097216 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhnkm\" (UniqueName: \"kubernetes.io/projected/3859fd47-f59f-4401-b195-a8a9e227c472-kube-api-access-hhnkm\") pod \"node-ca-6ns2w\" (UID: \"3859fd47-f59f-4401-b195-a8a9e227c472\") " pod="openshift-image-registry/node-ca-6ns2w" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.127632 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.164903 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.205894 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.238648 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:28:22 crc kubenswrapper[4631]: E1204 17:28:22.238790 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.239196 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.239251 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:22 crc kubenswrapper[4631]: E1204 17:28:22.239331 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:28:22 crc kubenswrapper[4631]: E1204 17:28:22.239429 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.248640 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:22Z 
is after 2025-08-24T17:21:41Z" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.284868 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.330811 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.367514 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.384817 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-6ns2w" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.405995 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:22Z is 
after 2025-08-24T17:21:41Z" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.446404 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.496717 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"
hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.507785 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-6ns2w" event={"ID":"3859fd47-f59f-4401-b195-a8a9e227c472","Type":"ContainerStarted","Data":"c9ee15b98ab91f50c100b84759ad9c89a2a648b3fb0b90a249163c922291e551"} Dec 04 17:28:22 crc kubenswrapper[4631]: I1204 17:28:22.526177 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.033019 4631 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.036318 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.036393 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.036412 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.036533 4631 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.051782 4631 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.052298 4631 kubelet_node_status.go:79] "Successfully registered 
node" node="crc" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.053992 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.054043 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.054059 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.054080 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.054096 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:23Z","lastTransitionTime":"2025-12-04T17:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:23 crc kubenswrapper[4631]: E1204 17:28:23.075180 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.079905 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.079942 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.079953 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.079970 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.079983 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:23Z","lastTransitionTime":"2025-12-04T17:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:23 crc kubenswrapper[4631]: E1204 17:28:23.097166 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.103683 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.103989 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.104323 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.104568 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.104710 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:23Z","lastTransitionTime":"2025-12-04T17:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:23 crc kubenswrapper[4631]: E1204 17:28:23.132567 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.138935 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.139130 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.139353 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.139685 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.139924 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:23Z","lastTransitionTime":"2025-12-04T17:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:23 crc kubenswrapper[4631]: E1204 17:28:23.151444 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.156465 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.156646 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.156754 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.156860 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.156947 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:23Z","lastTransitionTime":"2025-12-04T17:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:23 crc kubenswrapper[4631]: E1204 17:28:23.171648 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: E1204 17:28:23.171765 4631 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.173714 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.173754 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.173771 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.173796 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.173815 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:23Z","lastTransitionTime":"2025-12-04T17:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.276469 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.276509 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.276521 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.276537 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.276549 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:23Z","lastTransitionTime":"2025-12-04T17:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.378213 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.378250 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.378259 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.378272 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.378282 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:23Z","lastTransitionTime":"2025-12-04T17:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.481283 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.481358 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.481403 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.481427 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.481444 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:23Z","lastTransitionTime":"2025-12-04T17:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.515187 4631 generic.go:334] "Generic (PLEG): container finished" podID="de540d5a-4ce4-4960-b721-75c6d9a7a02e" containerID="b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf" exitCode=0 Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.515265 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" event={"ID":"de540d5a-4ce4-4960-b721-75c6d9a7a02e","Type":"ContainerDied","Data":"b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf"} Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.519546 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerStarted","Data":"c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3"} Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.521141 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-6ns2w" event={"ID":"3859fd47-f59f-4401-b195-a8a9e227c472","Type":"ContainerStarted","Data":"f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268"} Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.533124 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.548526 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.561403 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.580770 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.583656 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.583699 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.583710 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.583725 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.583737 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:23Z","lastTransitionTime":"2025-12-04T17:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.597167 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.608231 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.631734 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.648169 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.661481 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.678245 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.686853 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.686891 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.686901 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.686916 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.686926 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:23Z","lastTransitionTime":"2025-12-04T17:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.689186 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.700235 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.714086 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.726846 4631 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 
17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.738949 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.749318 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.759819 4631 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.770434 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.781645 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.788989 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.789027 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.789037 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.789053 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.789063 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:23Z","lastTransitionTime":"2025-12-04T17:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.798044 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xc
wj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.808607 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.823305 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.834638 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.845656 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.854977 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.875990 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z"
Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.889766 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z"
Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.891149 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.891177 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.891206 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.891220 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.891228 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:23Z","lastTransitionTime":"2025-12-04T17:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.903446 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:23Z is after 2025-08-24T17:21:41Z"
Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.980023 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:28:23 crc kubenswrapper[4631]: E1204 17:28:23.980197 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:28:31.980182118 +0000 UTC m=+42.012424116 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.993862 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.993924 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.993942 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.993965 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:23 crc kubenswrapper[4631]: I1204 17:28:23.993982 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:23Z","lastTransitionTime":"2025-12-04T17:28:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.080609 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.080656 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.080678 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.080721 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 17:28:24 crc kubenswrapper[4631]: E1204 17:28:24.080830 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 04 17:28:24 crc kubenswrapper[4631]: E1204 17:28:24.080844 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 04 17:28:24 crc kubenswrapper[4631]: E1204 17:28:24.080854 4631 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 04 17:28:24 crc kubenswrapper[4631]: E1204 17:28:24.080880 4631 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 04 17:28:24 crc kubenswrapper[4631]: E1204 17:28:24.080903 4631 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 04 17:28:24 crc kubenswrapper[4631]: E1204 17:28:24.080897 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:32.080885136 +0000 UTC m=+42.113127134 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 04 17:28:24 crc kubenswrapper[4631]: E1204 17:28:24.081001 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:32.080981749 +0000 UTC m=+42.113223787 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 04 17:28:24 crc kubenswrapper[4631]: E1204 17:28:24.080903 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 04 17:28:24 crc kubenswrapper[4631]: E1204 17:28:24.081070 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 04 17:28:24 crc kubenswrapper[4631]: E1204 17:28:24.081088 4631 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 04 17:28:24 crc kubenswrapper[4631]: E1204 17:28:24.081024 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:32.08101256 +0000 UTC m=+42.113254588 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 04 17:28:24 crc kubenswrapper[4631]: E1204 17:28:24.081153 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:32.081135343 +0000 UTC m=+42.113377341 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.096906 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.096967 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.096977 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.096992 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.097004 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:24Z","lastTransitionTime":"2025-12-04T17:28:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.199640 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.199681 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.199689 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.199705 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.199714 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:24Z","lastTransitionTime":"2025-12-04T17:28:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.239080 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.239169 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 17:28:24 crc kubenswrapper[4631]: E1204 17:28:24.239265 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.239182 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 17:28:24 crc kubenswrapper[4631]: E1204 17:28:24.239425 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 04 17:28:24 crc kubenswrapper[4631]: E1204 17:28:24.239605 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.303746 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.303796 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.303811 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.303832 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.303847 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:24Z","lastTransitionTime":"2025-12-04T17:28:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.406347 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.406403 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.406414 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.406432 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.406444 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:24Z","lastTransitionTime":"2025-12-04T17:28:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.509328 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.509362 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.509397 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.509414 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.509428 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:24Z","lastTransitionTime":"2025-12-04T17:28:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.611912 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.611969 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.612016 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.612052 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.612075 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:24Z","lastTransitionTime":"2025-12-04T17:28:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.714228 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.714478 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.714572 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.714665 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.714756 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:24Z","lastTransitionTime":"2025-12-04T17:28:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.818008 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.818076 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.818097 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.818125 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.818149 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:24Z","lastTransitionTime":"2025-12-04T17:28:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.920273 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.920306 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.920315 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.920328 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:24 crc kubenswrapper[4631]: I1204 17:28:24.920338 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:24Z","lastTransitionTime":"2025-12-04T17:28:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.022768 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.022808 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.022819 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.022836 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.022848 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:25Z","lastTransitionTime":"2025-12-04T17:28:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.125396 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.125433 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.125442 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.125459 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.125481 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:25Z","lastTransitionTime":"2025-12-04T17:28:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.227870 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.227927 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.227947 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.227975 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.228023 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:25Z","lastTransitionTime":"2025-12-04T17:28:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.330575 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.330608 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.330616 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.330629 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.330638 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:25Z","lastTransitionTime":"2025-12-04T17:28:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.433338 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.433419 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.433434 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.433458 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.433473 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:25Z","lastTransitionTime":"2025-12-04T17:28:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.536278 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.536334 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.536356 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.536424 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.536450 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:25Z","lastTransitionTime":"2025-12-04T17:28:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.540128 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerStarted","Data":"23a3a183eb165c06a9d4ffb75cd35187d121f5af0c25b55983214b225ce07ff9"}
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.540430 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.540459 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.546940 4631 generic.go:334] "Generic (PLEG): container finished" podID="de540d5a-4ce4-4960-b721-75c6d9a7a02e" containerID="b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55" exitCode=0
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.546992 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" event={"ID":"de540d5a-4ce4-4960-b721-75c6d9a7a02e","Type":"ContainerDied","Data":"b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55"}
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.557971 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.673054 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.681062 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.681115 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.681136 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.681163 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.681183 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:25Z","lastTransitionTime":"2025-12-04T17:28:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.688130 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.690456 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.701398 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.717530 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.732080 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.747091 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.760905 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z"
Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.774997 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c9
87117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 
17:28:25.784549 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.784577 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.784588 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.784602 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.784615 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:25Z","lastTransitionTime":"2025-12-04T17:28:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.787245 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12
-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.799657 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.815424 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.828146 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.845611 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\
",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log
-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a3a183eb165c06a9d4ffb75cd35187d121f5af0c25b55983214b225ce07ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\
\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.860540 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.872952 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.886997 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.889399 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.889428 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.889439 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.889483 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.889496 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:25Z","lastTransitionTime":"2025-12-04T17:28:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.906424 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.919291 4631 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.929851 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.940958 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.955979 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.966680 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.980070 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.992251 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.992852 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.992898 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.992912 4631 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.992929 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:25 crc kubenswrapper[4631]: I1204 17:28:25.992941 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:25Z","lastTransitionTime":"2025-12-04T17:28:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.000323 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.017478 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\
"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started
\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a3a183eb165c06a9d4ffb75cd35187d121f5af0c25b55983214b225ce07ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:26Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.033765 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:26Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.047030 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:26Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.095624 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.095652 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.095659 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.095671 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.095680 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:26Z","lastTransitionTime":"2025-12-04T17:28:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.197738 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.197776 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.197790 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.197818 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.197835 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:26Z","lastTransitionTime":"2025-12-04T17:28:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.240277 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.240333 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.240556 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:28:26 crc kubenswrapper[4631]: E1204 17:28:26.240546 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:28:26 crc kubenswrapper[4631]: E1204 17:28:26.240662 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:28:26 crc kubenswrapper[4631]: E1204 17:28:26.240778 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.300637 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.300683 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.300697 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.300716 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.300728 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:26Z","lastTransitionTime":"2025-12-04T17:28:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.403044 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.403096 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.403108 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.403126 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.403149 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:26Z","lastTransitionTime":"2025-12-04T17:28:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.505720 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.505778 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.505795 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.505815 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.505832 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:26Z","lastTransitionTime":"2025-12-04T17:28:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.549532 4631 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.609256 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.609319 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.609331 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.609390 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.609403 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:26Z","lastTransitionTime":"2025-12-04T17:28:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.712456 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.712499 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.712508 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.712525 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.712534 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:26Z","lastTransitionTime":"2025-12-04T17:28:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.815540 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.815605 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.815628 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.815656 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.815678 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:26Z","lastTransitionTime":"2025-12-04T17:28:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.919066 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.919142 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.919164 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.919193 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:26 crc kubenswrapper[4631]: I1204 17:28:26.919217 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:26Z","lastTransitionTime":"2025-12-04T17:28:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.021969 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.022034 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.022046 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.022066 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.022100 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:27Z","lastTransitionTime":"2025-12-04T17:28:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.125752 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.125810 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.125821 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.125837 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.125847 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:27Z","lastTransitionTime":"2025-12-04T17:28:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.228978 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.229030 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.229044 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.229066 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.229081 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:27Z","lastTransitionTime":"2025-12-04T17:28:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.332918 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.332959 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.332970 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.332986 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.332999 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:27Z","lastTransitionTime":"2025-12-04T17:28:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.435102 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.435172 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.435190 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.435216 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.435235 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:27Z","lastTransitionTime":"2025-12-04T17:28:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.538627 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.538686 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.538696 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.538711 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.538721 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:27Z","lastTransitionTime":"2025-12-04T17:28:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.557718 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" event={"ID":"de540d5a-4ce4-4960-b721-75c6d9a7a02e","Type":"ContainerStarted","Data":"ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a"} Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.557897 4631 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.572292 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clu
ster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:27Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.586350 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:27Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.601864 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:27Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.615558 4631 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:27Z is after 2025-08-24T17:21:41Z" Dec 04 
17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.631576 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:27Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.641308 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.641346 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.641355 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.641387 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.641398 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:27Z","lastTransitionTime":"2025-12-04T17:28:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.648533 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:27Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.660991 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:27Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.671532 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:27Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.684720 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:27Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.696287 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:27Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.713403 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a3a183eb165c06a9d4ffb75cd35187d121f5af0c25b55983214b225ce07ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPat
h\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:27Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.723361 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:27Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.735699 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:27Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.743405 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.743453 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.743464 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.743477 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.743486 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:27Z","lastTransitionTime":"2025-12-04T17:28:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.747887 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:27Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.846146 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.846545 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.846690 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.846854 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.847034 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:27Z","lastTransitionTime":"2025-12-04T17:28:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration 
file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.950042 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.950405 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.950468 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.950528 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:27 crc kubenswrapper[4631]: I1204 17:28:27.950603 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:27Z","lastTransitionTime":"2025-12-04T17:28:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.053564 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.053645 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.053671 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.053706 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.053779 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:28Z","lastTransitionTime":"2025-12-04T17:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.160547 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.160604 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.160636 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.160842 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.160867 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:28Z","lastTransitionTime":"2025-12-04T17:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.238916 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.238950 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:28 crc kubenswrapper[4631]: E1204 17:28:28.239030 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.239149 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:28:28 crc kubenswrapper[4631]: E1204 17:28:28.239349 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:28:28 crc kubenswrapper[4631]: E1204 17:28:28.239449 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.266089 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.266126 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.266139 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.266160 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.266174 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:28Z","lastTransitionTime":"2025-12-04T17:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.368606 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.368679 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.368700 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.368727 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.368745 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:28Z","lastTransitionTime":"2025-12-04T17:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.470883 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.470918 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.470927 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.470939 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.470949 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:28Z","lastTransitionTime":"2025-12-04T17:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.563072 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vpgzg_0d617abc-dc04-4807-b684-3640cde38e81/ovnkube-controller/0.log" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.565879 4631 generic.go:334] "Generic (PLEG): container finished" podID="0d617abc-dc04-4807-b684-3640cde38e81" containerID="23a3a183eb165c06a9d4ffb75cd35187d121f5af0c25b55983214b225ce07ff9" exitCode=1 Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.566235 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerDied","Data":"23a3a183eb165c06a9d4ffb75cd35187d121f5af0c25b55983214b225ce07ff9"} Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.567422 4631 scope.go:117] "RemoveContainer" containerID="23a3a183eb165c06a9d4ffb75cd35187d121f5af0c25b55983214b225ce07ff9" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.572915 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.572967 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.572983 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.573000 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.573018 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:28Z","lastTransitionTime":"2025-12-04T17:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.586610 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:28Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.600742 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:28Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.614554 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:28Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.626998 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mount
Path\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:28Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.638115 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:28Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.648432 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:28Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.659559 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:28Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.668217 4631 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:28Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.675127 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.675166 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.675175 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.675189 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.675200 4631 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:28Z","lastTransitionTime":"2025-12-04T17:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.680328 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:28Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.692551 4631 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:28Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.703030 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:28Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.716621 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:28Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.725819 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:28Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.742822 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a3a183eb165c06a9d4ffb75cd35187d121f5af0c25b55983214b225ce07ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a3a183eb165c06a9d4ffb75cd35187d121f5af0c25b55983214b225ce07ff9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"message\\\":\\\"*v1.EgressIP event handler 8 for removal\\\\nI1204 17:28:27.494096 5826 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 17:28:27.494171 5826 factory.go:656] Stopping watch factory\\\\nI1204 17:28:27.494199 5826 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 17:28:27.494281 5826 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 17:28:27.494327 5826 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1204 17:28:27.494353 5826 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 17:28:27.494577 5826 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1204 17:28:27.494067 5826 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 17:28:27.494781 5826 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1204 17:28:27.494213 5826 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1204 17:28:27.494216 5826 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 17:28:27.494404 5826 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:28Z is after 2025-08-24T17:21:41Z"
Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.777545 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.777576 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.777584 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.777613 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.777622 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:28Z","lastTransitionTime":"2025-12-04T17:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.880189 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.880230 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.880239 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.880253 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.880263 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:28Z","lastTransitionTime":"2025-12-04T17:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.983612 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.983651 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.983663 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.983680 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:28 crc kubenswrapper[4631]: I1204 17:28:28.983691 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:28Z","lastTransitionTime":"2025-12-04T17:28:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.086091 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.086158 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.086180 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.086208 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.086265 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:29Z","lastTransitionTime":"2025-12-04T17:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.189751 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.189787 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.189799 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.189817 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.189826 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:29Z","lastTransitionTime":"2025-12-04T17:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.293099 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.293430 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.293459 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.293495 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.293521 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:29Z","lastTransitionTime":"2025-12-04T17:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.397064 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.397117 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.397128 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.397145 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.397156 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:29Z","lastTransitionTime":"2025-12-04T17:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.895266 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn"]
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.899038 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.901856 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.901926 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.902083 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.903875 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.903903 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:29Z","lastTransitionTime":"2025-12-04T17:28:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.904163 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.904651 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.921607 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:29Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.951282 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a3a183eb165c06a9d4ffb75cd35187d121f5af0c25b55983214b225ce07ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a3a183eb165c06a9d4ffb75cd35187d121f5af0c25b55983214b225ce07ff9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"message\\\":\\\"*v1.EgressIP event handler 8 for removal\\\\nI1204 17:28:27.494096 5826 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 17:28:27.494171 5826 factory.go:656] Stopping watch factory\\\\nI1204 17:28:27.494199 5826 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 17:28:27.494281 5826 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 17:28:27.494327 5826 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1204 17:28:27.494353 5826 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 17:28:27.494577 5826 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1204 17:28:27.494067 5826 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 17:28:27.494781 5826 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1204 17:28:27.494213 5826 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1204 17:28:27.494216 5826 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 17:28:27.494404 5826 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:29Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.970040 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:29Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.985630 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:29Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.995744 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/29a77294-77b9-4074-9cb9-e31985b67447-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-88qcn\" (UID: \"29a77294-77b9-4074-9cb9-e31985b67447\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.995795 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/29a77294-77b9-4074-9cb9-e31985b67447-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-88qcn\" (UID: \"29a77294-77b9-4074-9cb9-e31985b67447\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.995827 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/29a77294-77b9-4074-9cb9-e31985b67447-env-overrides\") pod \"ovnkube-control-plane-749d76644c-88qcn\" (UID: \"29a77294-77b9-4074-9cb9-e31985b67447\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" Dec 04 17:28:29 crc kubenswrapper[4631]: I1204 17:28:29.995884 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45vbb\" (UniqueName: \"kubernetes.io/projected/29a77294-77b9-4074-9cb9-e31985b67447-kube-api-access-45vbb\") pod \"ovnkube-control-plane-749d76644c-88qcn\" (UID: \"29a77294-77b9-4074-9cb9-e31985b67447\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.001407 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:29Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.006828 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.006885 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.006901 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.006921 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.006938 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:30Z","lastTransitionTime":"2025-12-04T17:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.022174 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.042005 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.053817 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.067091 4631 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.080327 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-api
server-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.092760 4631 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.096825 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45vbb\" (UniqueName: \"kubernetes.io/projected/29a77294-77b9-4074-9cb9-e31985b67447-kube-api-access-45vbb\") pod \"ovnkube-control-plane-749d76644c-88qcn\" (UID: \"29a77294-77b9-4074-9cb9-e31985b67447\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.096870 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/29a77294-77b9-4074-9cb9-e31985b67447-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-88qcn\" (UID: \"29a77294-77b9-4074-9cb9-e31985b67447\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.096891 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/29a77294-77b9-4074-9cb9-e31985b67447-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-88qcn\" (UID: \"29a77294-77b9-4074-9cb9-e31985b67447\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.096911 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/29a77294-77b9-4074-9cb9-e31985b67447-env-overrides\") pod \"ovnkube-control-plane-749d76644c-88qcn\" (UID: \"29a77294-77b9-4074-9cb9-e31985b67447\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.097568 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/29a77294-77b9-4074-9cb9-e31985b67447-env-overrides\") pod \"ovnkube-control-plane-749d76644c-88qcn\" (UID: \"29a77294-77b9-4074-9cb9-e31985b67447\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.097887 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/29a77294-77b9-4074-9cb9-e31985b67447-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-88qcn\" (UID: \"29a77294-77b9-4074-9cb9-e31985b67447\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.103504 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.104691 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/29a77294-77b9-4074-9cb9-e31985b67447-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-88qcn\" (UID: \"29a77294-77b9-4074-9cb9-e31985b67447\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.110205 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.110258 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.110270 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.110290 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.110302 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:30Z","lastTransitionTime":"2025-12-04T17:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.116552 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45vbb\" (UniqueName: \"kubernetes.io/projected/29a77294-77b9-4074-9cb9-e31985b67447-kube-api-access-45vbb\") pod \"ovnkube-control-plane-749d76644c-88qcn\" (UID: \"29a77294-77b9-4074-9cb9-e31985b67447\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.123523 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f
2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\
\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.137868 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.152813 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29a77294-77b9-4074-9cb9-e31985b67447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-88qcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.212853 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:30 crc 
kubenswrapper[4631]: I1204 17:28:30.212913 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.212933 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.212964 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.212988 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:30Z","lastTransitionTime":"2025-12-04T17:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.213809 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" Dec 04 17:28:30 crc kubenswrapper[4631]: W1204 17:28:30.226516 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod29a77294_77b9_4074_9cb9_e31985b67447.slice/crio-11579de73345291d67bbb0e51cfce8ccd5e1b6b268004175f74a455b7453f371 WatchSource:0}: Error finding container 11579de73345291d67bbb0e51cfce8ccd5e1b6b268004175f74a455b7453f371: Status 404 returned error can't find the container with id 11579de73345291d67bbb0e51cfce8ccd5e1b6b268004175f74a455b7453f371 Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.239252 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:28:30 crc kubenswrapper[4631]: E1204 17:28:30.239414 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.239822 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:28:30 crc kubenswrapper[4631]: E1204 17:28:30.239877 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.239918 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:30 crc kubenswrapper[4631]: E1204 17:28:30.239965 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.253753 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.266774 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.288983 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.300255 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.313486 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29a77294-77b9-4074-9cb9-e31985b67447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-88qcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.316512 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.316545 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.316556 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.316574 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.316585 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:30Z","lastTransitionTime":"2025-12-04T17:28:30Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.328131 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28
:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.337292 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.356508 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a3a183eb165c06a9d4ffb75cd35187d121f5af0c25b55983214b225ce07ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a3a183eb165c06a9d4ffb75cd35187d121f5af0c25b55983214b225ce07ff9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"message\\\":\\\"*v1.EgressIP event handler 8 for removal\\\\nI1204 17:28:27.494096 5826 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 17:28:27.494171 5826 factory.go:656] Stopping watch factory\\\\nI1204 17:28:27.494199 5826 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 17:28:27.494281 5826 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 17:28:27.494327 5826 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1204 17:28:27.494353 5826 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 17:28:27.494577 5826 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1204 17:28:27.494067 5826 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 17:28:27.494781 5826 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1204 17:28:27.494213 5826 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1204 17:28:27.494216 5826 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 17:28:27.494404 5826 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.369926 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.385766 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.409865 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.419182 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.419233 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.419244 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.419262 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.419276 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:30Z","lastTransitionTime":"2025-12-04T17:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.425101 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.438477 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.452448 4631 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.466697 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.522225 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.522651 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.522763 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.522886 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.523014 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:30Z","lastTransitionTime":"2025-12-04T17:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.625981 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.626304 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.626439 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.626557 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.626655 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:30Z","lastTransitionTime":"2025-12-04T17:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.703892 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-8kcrj"] Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.704900 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:28:30 crc kubenswrapper[4631]: E1204 17:28:30.704984 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.718305 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.729018 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.729067 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.729082 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.729106 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.729127 4631 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:30Z","lastTransitionTime":"2025-12-04T17:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.742516 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\
":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a3a183eb165c06a9d4ffb75cd35187d121f5af0c25b55983214b225ce07ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a3a183eb165c06a9d4ffb75cd35187d121f5af0c25b55983214b225ce07ff9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"message\\\":\\\"*v1.EgressIP event handler 8 for removal\\\\nI1204 17:28:27.494096 5826 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 17:28:27.494171 5826 factory.go:656] Stopping watch factory\\\\nI1204 17:28:27.494199 5826 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 17:28:27.494281 5826 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 17:28:27.494327 5826 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1204 17:28:27.494353 5826 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 17:28:27.494577 5826 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1204 17:28:27.494067 5826 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 17:28:27.494781 5826 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1204 17:28:27.494213 5826 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1204 17:28:27.494216 5826 reflector.go:311] Stopping reflector *v1.Node (0s) from 
k8s.io/client-go/informers/factory.go:160\\\\nI1204 17:28:27.494404 5826 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-
o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.753385 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8kcrj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8kcrj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.765232 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.776863 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.789901 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.801504 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.804709 4631 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fm8tz\" (UniqueName: \"kubernetes.io/projected/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-kube-api-access-fm8tz\") pod \"network-metrics-daemon-8kcrj\" (UID: \"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\") " pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.805132 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs\") pod \"network-metrics-daemon-8kcrj\" (UID: \"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\") " pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.822510 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\
",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.831912 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.831973 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.831985 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.832006 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.832017 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:30Z","lastTransitionTime":"2025-12-04T17:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.838170 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.856318 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.874195 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.887459 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.903480 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29a77294-77b9-4074-9cb9-e31985b67447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-88qcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.903696 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" event={"ID":"29a77294-77b9-4074-9cb9-e31985b67447","Type":"ContainerStarted","Data":"11579de73345291d67bbb0e51cfce8ccd5e1b6b268004175f74a455b7453f371"} Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.906494 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs\") pod \"network-metrics-daemon-8kcrj\" (UID: \"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\") " pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.906546 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fm8tz\" (UniqueName: \"kubernetes.io/projected/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-kube-api-access-fm8tz\") pod \"network-metrics-daemon-8kcrj\" (UID: \"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\") " pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:28:30 crc kubenswrapper[4631]: E1204 17:28:30.906954 4631 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 17:28:30 crc kubenswrapper[4631]: E1204 17:28:30.907009 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs podName:86903bd1-674d-4fa2-b9d1-dbc8f347b72b nodeName:}" failed. No retries permitted until 2025-12-04 17:28:31.406992681 +0000 UTC m=+41.439234689 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs") pod "network-metrics-daemon-8kcrj" (UID: "86903bd1-674d-4fa2-b9d1-dbc8f347b72b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.921557 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.924854 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fm8tz\" (UniqueName: 
\"kubernetes.io/projected/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-kube-api-access-fm8tz\") pod \"network-metrics-daemon-8kcrj\" (UID: \"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\") " pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.934309 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.934354 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.934367 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.934404 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.934418 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:30Z","lastTransitionTime":"2025-12-04T17:28:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.938563 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:30 crc kubenswrapper[4631]: I1204 17:28:30.953764 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.036617 4631 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.036660 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.036670 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.036683 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.036692 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:31Z","lastTransitionTime":"2025-12-04T17:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.138645 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.138695 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.138706 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.138724 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.138734 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:31Z","lastTransitionTime":"2025-12-04T17:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.241643 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.241738 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.241762 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.241792 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.241815 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:31Z","lastTransitionTime":"2025-12-04T17:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.344217 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.344256 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.344265 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.344280 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.344289 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:31Z","lastTransitionTime":"2025-12-04T17:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.412357 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs\") pod \"network-metrics-daemon-8kcrj\" (UID: \"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\") " pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:28:31 crc kubenswrapper[4631]: E1204 17:28:31.412528 4631 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 17:28:31 crc kubenswrapper[4631]: E1204 17:28:31.412628 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs podName:86903bd1-674d-4fa2-b9d1-dbc8f347b72b nodeName:}" failed. No retries permitted until 2025-12-04 17:28:32.412606574 +0000 UTC m=+42.444848582 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs") pod "network-metrics-daemon-8kcrj" (UID: "86903bd1-674d-4fa2-b9d1-dbc8f347b72b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.446568 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.446653 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.446679 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.446715 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.446739 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:31Z","lastTransitionTime":"2025-12-04T17:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.552071 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.552122 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.552137 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.552153 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.552168 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:31Z","lastTransitionTime":"2025-12-04T17:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.654565 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.654615 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.654631 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.654651 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.654666 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:31Z","lastTransitionTime":"2025-12-04T17:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.757162 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.757237 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.757256 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.757285 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.757520 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:31Z","lastTransitionTime":"2025-12-04T17:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.859974 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.860029 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.860043 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.860062 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.860076 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:31Z","lastTransitionTime":"2025-12-04T17:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.909994 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vpgzg_0d617abc-dc04-4807-b684-3640cde38e81/ovnkube-controller/0.log" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.913482 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerStarted","Data":"8a8fc0e9edc00e1ec794ac19f971843eac04082ac1887f600e9dc8ef5acf5c4c"} Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.915436 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" event={"ID":"29a77294-77b9-4074-9cb9-e31985b67447","Type":"ContainerStarted","Data":"712c54ce6d416f06964e2a50bf902d2c18b7bf713a9d67265bfbadc2fbf4ee6d"} Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.963149 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.963208 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.963219 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.963234 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:31 crc kubenswrapper[4631]: I1204 17:28:31.963246 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:31Z","lastTransitionTime":"2025-12-04T17:28:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.017007 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:28:32 crc kubenswrapper[4631]: E1204 17:28:32.017282 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:28:48.017245719 +0000 UTC m=+58.049487737 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.066425 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.066779 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.066791 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.066807 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.066818 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:32Z","lastTransitionTime":"2025-12-04T17:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.117966 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.118012 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.118047 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.118071 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:32 crc kubenswrapper[4631]: E1204 17:28:32.118135 4631 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Dec 04 17:28:32 crc kubenswrapper[4631]: E1204 17:28:32.118174 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:48.118162534 +0000 UTC m=+58.150404522 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 17:28:32 crc kubenswrapper[4631]: E1204 17:28:32.118253 4631 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 17:28:32 crc kubenswrapper[4631]: E1204 17:28:32.118274 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:48.118268297 +0000 UTC m=+58.150510295 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 17:28:32 crc kubenswrapper[4631]: E1204 17:28:32.118326 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 17:28:32 crc kubenswrapper[4631]: E1204 17:28:32.118337 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 17:28:32 crc kubenswrapper[4631]: E1204 17:28:32.118347 4631 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:32 crc kubenswrapper[4631]: E1204 17:28:32.118380 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:48.118360329 +0000 UTC m=+58.150602327 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:32 crc kubenswrapper[4631]: E1204 17:28:32.118419 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 17:28:32 crc kubenswrapper[4631]: E1204 17:28:32.118428 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 17:28:32 crc kubenswrapper[4631]: E1204 17:28:32.118435 4631 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:32 crc kubenswrapper[4631]: E1204 17:28:32.118453 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 17:28:48.118447892 +0000 UTC m=+58.150689890 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.170829 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.170864 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.170874 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.170890 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.170903 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:32Z","lastTransitionTime":"2025-12-04T17:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.238524 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:28:32 crc kubenswrapper[4631]: E1204 17:28:32.238642 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.238785 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:28:32 crc kubenswrapper[4631]: E1204 17:28:32.238844 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.238882 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:28:32 crc kubenswrapper[4631]: E1204 17:28:32.238927 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.241249 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:32 crc kubenswrapper[4631]: E1204 17:28:32.241325 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.273268 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.273296 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.273305 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.273318 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.273330 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:32Z","lastTransitionTime":"2025-12-04T17:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.375636 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.375673 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.375685 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.375700 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.375712 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:32Z","lastTransitionTime":"2025-12-04T17:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.420862 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs\") pod \"network-metrics-daemon-8kcrj\" (UID: \"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\") " pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:28:32 crc kubenswrapper[4631]: E1204 17:28:32.421019 4631 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 17:28:32 crc kubenswrapper[4631]: E1204 17:28:32.421064 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs podName:86903bd1-674d-4fa2-b9d1-dbc8f347b72b nodeName:}" failed. No retries permitted until 2025-12-04 17:28:34.421049952 +0000 UTC m=+44.453291950 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs") pod "network-metrics-daemon-8kcrj" (UID: "86903bd1-674d-4fa2-b9d1-dbc8f347b72b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.478725 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.478760 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.478768 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.478782 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.478791 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:32Z","lastTransitionTime":"2025-12-04T17:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.581323 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.581405 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.581417 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.581453 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.581467 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:32Z","lastTransitionTime":"2025-12-04T17:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.684645 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.684712 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.684733 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.684766 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.684787 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:32Z","lastTransitionTime":"2025-12-04T17:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.788788 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.788844 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.788863 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.788890 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.788909 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:32Z","lastTransitionTime":"2025-12-04T17:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.893063 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.893124 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.893145 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.893174 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.893196 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:32Z","lastTransitionTime":"2025-12-04T17:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.924162 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vpgzg_0d617abc-dc04-4807-b684-3640cde38e81/ovnkube-controller/1.log" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.925326 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vpgzg_0d617abc-dc04-4807-b684-3640cde38e81/ovnkube-controller/0.log" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.933925 4631 generic.go:334] "Generic (PLEG): container finished" podID="0d617abc-dc04-4807-b684-3640cde38e81" containerID="8a8fc0e9edc00e1ec794ac19f971843eac04082ac1887f600e9dc8ef5acf5c4c" exitCode=1 Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.934101 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerDied","Data":"8a8fc0e9edc00e1ec794ac19f971843eac04082ac1887f600e9dc8ef5acf5c4c"} Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.934202 4631 scope.go:117] "RemoveContainer" containerID="23a3a183eb165c06a9d4ffb75cd35187d121f5af0c25b55983214b225ce07ff9" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.938731 4631 scope.go:117] "RemoveContainer" containerID="8a8fc0e9edc00e1ec794ac19f971843eac04082ac1887f600e9dc8ef5acf5c4c" Dec 04 17:28:32 crc kubenswrapper[4631]: E1204 17:28:32.939093 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-vpgzg_openshift-ovn-kubernetes(0d617abc-dc04-4807-b684-3640cde38e81)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" podUID="0d617abc-dc04-4807-b684-3640cde38e81" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.940688 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" event={"ID":"29a77294-77b9-4074-9cb9-e31985b67447","Type":"ContainerStarted","Data":"c96709e063c20b40adae03f18364ec6297f144e6454670c5ab760f150837e165"} Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.962034 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8kcrj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8kcrj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:32Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.978911 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:32Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.997197 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.997289 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.997316 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.997355 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:32 crc kubenswrapper[4631]: I1204 17:28:32.997409 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:32Z","lastTransitionTime":"2025-12-04T17:28:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.005706 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a8fc0e9edc00e1ec794ac19f971843eac04082ac1887f600e9dc8ef5acf5c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a3a183eb165c06a9d4ffb75cd35187d121f5af0c25b55983214b225ce07ff9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"message\\\":\\\"*v1.EgressIP event handler 8 for removal\\\\nI1204 17:28:27.494096 5826 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 17:28:27.494171 5826 factory.go:656] Stopping watch factory\\\\nI1204 17:28:27.494199 5826 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 17:28:27.494281 5826 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 17:28:27.494327 5826 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1204 17:28:27.494353 5826 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 17:28:27.494577 5826 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1204 17:28:27.494067 5826 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 17:28:27.494781 5826 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1204 17:28:27.494213 5826 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1204 17:28:27.494216 5826 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 17:28:27.494404 5826 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a8fc0e9edc00e1ec794ac19f971843eac04082ac1887f600e9dc8ef5acf5c4c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"message\\\":\\\"d/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 17:28:32.456090 5968 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1204 17:28:32.456122 5968 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 17:28:32.456156 5968 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1204 17:28:32.456172 5968 factory.go:656] Stopping watch factory\\\\nI1204 17:28:32.456188 5968 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 17:28:32.456195 5968 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1204 17:28:32.456272 5968 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1204 17:28:32.502496 5968 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1204 17:28:32.502546 5968 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1204 17:28:32.502608 5968 ovnkube.go:599] Stopped ovnkube\\\\nI1204 17:28:32.502641 5968 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1204 17:28:32.502761 5968 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.028186 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.046038 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.069781 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.093410 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.100580 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.100649 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.100676 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.100708 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.100731 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:33Z","lastTransitionTime":"2025-12-04T17:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.112319 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea1
77225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.130846 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"na
me\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.148068 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.168012 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.189074 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.203910 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.203983 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.204005 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.204028 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.204066 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:33Z","lastTransitionTime":"2025-12-04T17:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.214628 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState
\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"n
ame\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt
/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.232114 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.247700 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29a77294-77b9-4074-9cb9-e31985b67447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-88qcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.266821 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-
dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.308190 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.308315 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.308349 
4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.308426 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.308448 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:33Z","lastTransitionTime":"2025-12-04T17:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.309145 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z"
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.346466 4631 status_manager.go:875] "Failed to update status for
pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:17
4f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ov
n-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a8fc0e9edc00e1ec794ac19f971843eac04082ac1887f600e9dc8ef5acf5c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a3a183eb165c06a9d4ffb75cd35187d121f5af0c25b55983214b225ce07ff9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"message\\\":\\\"*v1.EgressIP event handler 8 for removal\\\\nI1204 17:28:27.494096 5826 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 17:28:27.494171 5826 factory.go:656] Stopping watch factory\\\\nI1204 17:28:27.494199 5826 handler.go:208] Removed *v1.Node event handler 7\\\\nI1204 17:28:27.494281 5826 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 17:28:27.494327 5826 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1204 17:28:27.494353 5826 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 17:28:27.494577 5826 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1204 17:28:27.494067 5826 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 17:28:27.494781 5826 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1204 17:28:27.494213 5826 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1204 17:28:27.494216 5826 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1204 17:28:27.494404 5826 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a8fc0e9edc00e1ec794ac19f971843eac04082ac1887f600e9dc8ef5acf5c4c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"message\\\":\\\"d/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 17:28:32.456090 5968 
handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1204 17:28:32.456122 5968 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 17:28:32.456156 5968 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1204 17:28:32.456172 5968 factory.go:656] Stopping watch factory\\\\nI1204 17:28:32.456188 5968 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 17:28:32.456195 5968 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1204 17:28:32.456272 5968 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1204 17:28:32.502496 5968 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1204 17:28:32.502546 5968 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1204 17:28:32.502608 5968 ovnkube.go:599] Stopped ovnkube\\\\nI1204 17:28:32.502641 5968 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1204 17:28:32.502761 5968 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.364345 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8kcrj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8kcrj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.385002 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.404314 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.411478 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.411534 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.411553 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.411578 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.411594 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:33Z","lastTransitionTime":"2025-12-04T17:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.424851 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.430162 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.430224 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.430238 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.430268 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.430285 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:33Z","lastTransitionTime":"2025-12-04T17:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:33 crc kubenswrapper[4631]: E1204 17:28:33.443811 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.444248 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.447854 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.447963 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.447975 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.447991 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.448002 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:33Z","lastTransitionTime":"2025-12-04T17:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.460530 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: E1204 17:28:33.464671 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c
04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.475938 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.475995 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.476009 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.476030 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.476229 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:33Z","lastTransitionTime":"2025-12-04T17:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.480576 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: E1204 17:28:33.495521 4631 kubelet_node_status.go:585] "Error 
updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0
878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"size
Bytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365}
,{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.496934 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multu
s\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.501067 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.501101 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.501110 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.501125 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.501135 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:33Z","lastTransitionTime":"2025-12-04T17:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:33 crc kubenswrapper[4631]: E1204 17:28:33.514740 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.517849 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.519967 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.520027 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.520042 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.520065 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 
17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.520078 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:33Z","lastTransitionTime":"2025-12-04T17:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:33 crc kubenswrapper[4631]: E1204 17:28:33.535094 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: E1204 17:28:33.535257 4631 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.537461 4631 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.537563 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.537598 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:33 crc kubenswrapper[4631]: 
I1204 17:28:33.537606 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.537622 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.537634 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:33Z","lastTransitionTime":"2025-12-04T17:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.552080 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.570728 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.582858 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.597171 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29a77294-77b9-4074-9cb9-e31985b67447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://712c54ce6d416f06964e2a50bf902d2c18b7bf713a9d67265bfbadc2fbf4ee6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96709e063c20b40adae03f18364ec6297f144e6454670c5ab760f150837e165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-88qcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 
17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.639977 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.640035 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.640045 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.640060 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.640073 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:33Z","lastTransitionTime":"2025-12-04T17:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.743078 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.743139 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.743153 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.743178 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.743195 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:33Z","lastTransitionTime":"2025-12-04T17:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.846344 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.846438 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.846461 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.846502 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.846526 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:33Z","lastTransitionTime":"2025-12-04T17:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.949222 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.949295 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.949309 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.949304 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vpgzg_0d617abc-dc04-4807-b684-3640cde38e81/ovnkube-controller/1.log"
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.949328 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.949463 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:33Z","lastTransitionTime":"2025-12-04T17:28:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.956980 4631 scope.go:117] "RemoveContainer" containerID="8a8fc0e9edc00e1ec794ac19f971843eac04082ac1887f600e9dc8ef5acf5c4c"
Dec 04 17:28:33 crc kubenswrapper[4631]: E1204 17:28:33.957325 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-vpgzg_openshift-ovn-kubernetes(0d617abc-dc04-4807-b684-3640cde38e81)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" podUID="0d617abc-dc04-4807-b684-3640cde38e81"
Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.982118 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:33 crc kubenswrapper[4631]: I1204 17:28:33.998661 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:33Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.013237 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:34Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.032740 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:34Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.045888 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:34Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.052297 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.052332 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.052346 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.052393 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.052419 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:34Z","lastTransitionTime":"2025-12-04T17:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.058942 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29a77294-77b9-4074-9cb9-e31985b67447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://712c54ce6d416f06964e2a50bf902d2c18b7bf713a9d67265bfbadc2fbf4ee6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96709e063c20b40adae03f18364ec6297f144e6454670c5ab760f150837e165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:29Z\\\"}}\" 
for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-88qcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:34Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.070678 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:34Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.098005 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a8fc0e9edc00e1ec794ac19f971843eac04082ac1887f600e9dc8ef5acf5c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a8fc0e9edc00e1ec794ac19f971843eac04082ac1887f600e9dc8ef5acf5c4c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"message\\\":\\\"d/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 17:28:32.456090 5968 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1204 17:28:32.456122 5968 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 17:28:32.456156 5968 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1204 17:28:32.456172 5968 factory.go:656] Stopping watch factory\\\\nI1204 17:28:32.456188 5968 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 17:28:32.456195 5968 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1204 17:28:32.456272 5968 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1204 17:28:32.502496 5968 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1204 17:28:32.502546 5968 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1204 17:28:32.502608 5968 ovnkube.go:599] Stopped ovnkube\\\\nI1204 17:28:32.502641 5968 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1204 17:28:32.502761 5968 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vpgzg_openshift-ovn-kubernetes(0d617abc-dc04-4807-b684-3640cde38e81)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:34Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.116217 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8kcrj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8kcrj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:34Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.137128 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:34Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.154411 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:34Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.155231 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.155276 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.155289 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.155309 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.155322 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:34Z","lastTransitionTime":"2025-12-04T17:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.169504 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:34Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.185038 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:34Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.198256 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:34Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.211105 4631 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:34Z is after 2025-08-24T17:21:41Z" Dec 04 
17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.226197 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:34Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.238710 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.238715 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:28:34 crc kubenswrapper[4631]: E1204 17:28:34.238966 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.238988 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.239022 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:28:34 crc kubenswrapper[4631]: E1204 17:28:34.239197 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:28:34 crc kubenswrapper[4631]: E1204 17:28:34.239293 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:28:34 crc kubenswrapper[4631]: E1204 17:28:34.239433 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.259554 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.259626 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.259646 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.259675 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.259698 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:34Z","lastTransitionTime":"2025-12-04T17:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.363134 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.363184 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.363196 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.363219 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.363233 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:34Z","lastTransitionTime":"2025-12-04T17:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.447159 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs\") pod \"network-metrics-daemon-8kcrj\" (UID: \"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\") " pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:28:34 crc kubenswrapper[4631]: E1204 17:28:34.447449 4631 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 17:28:34 crc kubenswrapper[4631]: E1204 17:28:34.447613 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs podName:86903bd1-674d-4fa2-b9d1-dbc8f347b72b nodeName:}" failed. No retries permitted until 2025-12-04 17:28:38.447575004 +0000 UTC m=+48.479817202 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs") pod "network-metrics-daemon-8kcrj" (UID: "86903bd1-674d-4fa2-b9d1-dbc8f347b72b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.467062 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.467100 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.467110 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.467133 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.467150 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:34Z","lastTransitionTime":"2025-12-04T17:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.570722 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.570772 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.570783 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.570801 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.570813 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:34Z","lastTransitionTime":"2025-12-04T17:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.674258 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.674319 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.674348 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.674381 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:34 crc kubenswrapper[4631]: I1204 17:28:34.674393 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:34Z","lastTransitionTime":"2025-12-04T17:28:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[the five node-status entries above repeat unchanged at ~100 ms intervals, 17:28:34.776 through 17:28:36.221]
Dec 04 17:28:36 crc kubenswrapper[4631]: I1204 17:28:36.239178 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 17:28:36 crc kubenswrapper[4631]: I1204 17:28:36.239331 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj"
Dec 04 17:28:36 crc kubenswrapper[4631]: E1204 17:28:36.239419 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 04 17:28:36 crc kubenswrapper[4631]: I1204 17:28:36.239701 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 17:28:36 crc kubenswrapper[4631]: E1204 17:28:36.239696 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b"
Dec 04 17:28:36 crc kubenswrapper[4631]: I1204 17:28:36.239748 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 17:28:36 crc kubenswrapper[4631]: E1204 17:28:36.239818 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 04 17:28:36 crc kubenswrapper[4631]: E1204 17:28:36.239899 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[node-status heartbeat cycle repeats unchanged at ~100 ms intervals, 17:28:36.324 through 17:28:37.875]
Dec 04 17:28:37 crc kubenswrapper[4631]: I1204 17:28:37.913440 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg"
Dec 04 17:28:37 crc kubenswrapper[4631]: I1204 17:28:37.914804 4631 scope.go:117] "RemoveContainer" containerID="8a8fc0e9edc00e1ec794ac19f971843eac04082ac1887f600e9dc8ef5acf5c4c"
Dec 04 17:28:37 crc kubenswrapper[4631]: E1204 17:28:37.915164 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-vpgzg_openshift-ovn-kubernetes(0d617abc-dc04-4807-b684-3640cde38e81)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" podUID="0d617abc-dc04-4807-b684-3640cde38e81"
[node-status heartbeat cycle repeats unchanged at 17:28:37.978, 17:28:38.082 and 17:28:38.184]
Dec 04 17:28:38 crc kubenswrapper[4631]: I1204 17:28:38.238969 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 17:28:38 crc kubenswrapper[4631]: E1204 17:28:38.239148 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 04 17:28:38 crc kubenswrapper[4631]: I1204 17:28:38.238970 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj"
Dec 04 17:28:38 crc kubenswrapper[4631]: I1204 17:28:38.239233 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 17:28:38 crc kubenswrapper[4631]: E1204 17:28:38.239254 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b"
Dec 04 17:28:38 crc kubenswrapper[4631]: I1204 17:28:38.238970 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 17:28:38 crc kubenswrapper[4631]: E1204 17:28:38.239444 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 04 17:28:38 crc kubenswrapper[4631]: E1204 17:28:38.239569 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[node-status heartbeat cycle repeats unchanged at 17:28:38.288 and 17:28:38.393]
Dec 04 17:28:38 crc kubenswrapper[4631]: I1204 17:28:38.489685 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs\") pod \"network-metrics-daemon-8kcrj\" (UID: \"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\") " pod="openshift-multus/network-metrics-daemon-8kcrj"
Dec 04 17:28:38 crc kubenswrapper[4631]: E1204 17:28:38.489864 4631 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 04 17:28:38 crc kubenswrapper[4631]: E1204 17:28:38.489959 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs podName:86903bd1-674d-4fa2-b9d1-dbc8f347b72b nodeName:}" failed. No retries permitted until 2025-12-04 17:28:46.489934601 +0000 UTC m=+56.522176609 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs") pod "network-metrics-daemon-8kcrj" (UID: "86903bd1-674d-4fa2-b9d1-dbc8f347b72b") : object "openshift-multus"/"metrics-daemon-secret" not registered
[node-status heartbeat cycle repeats unchanged at ~100 ms intervals, 17:28:38.496 through 17:28:40.150]
Has your network provider started?"} Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.150231 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.150280 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.150293 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.150310 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.150322 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:40Z","lastTransitionTime":"2025-12-04T17:28:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.239363 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.239550 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:28:40 crc kubenswrapper[4631]: E1204 17:28:40.239795 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.240491 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:28:40 crc kubenswrapper[4631]: E1204 17:28:40.240617 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.240666 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:40 crc kubenswrapper[4631]: E1204 17:28:40.240934 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:28:40 crc kubenswrapper[4631]: E1204 17:28:40.241060 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.253729 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.253787 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.253804 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.253825 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.253841 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:40Z","lastTransitionTime":"2025-12-04T17:28:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.257967 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8kcrj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8kcrj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:40Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.270105 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:40Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.300355 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a8fc0e9edc00e1ec794ac19f971843eac04082ac1887f600e9dc8ef5acf5c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a8fc0e9edc00e1ec794ac19f971843eac04082ac1887f600e9dc8ef5acf5c4c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"message\\\":\\\"d/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 17:28:32.456090 5968 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1204 17:28:32.456122 5968 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 17:28:32.456156 5968 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1204 17:28:32.456172 5968 factory.go:656] Stopping watch factory\\\\nI1204 17:28:32.456188 5968 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 17:28:32.456195 5968 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1204 17:28:32.456272 5968 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1204 17:28:32.502496 5968 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1204 17:28:32.502546 5968 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1204 17:28:32.502608 5968 ovnkube.go:599] Stopped ovnkube\\\\nI1204 17:28:32.502641 5968 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1204 17:28:32.502761 5968 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vpgzg_openshift-ovn-kubernetes(0d617abc-dc04-4807-b684-3640cde38e81)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:40Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.316302 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:40Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.332857 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:40Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.349716 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:40Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.356221 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.356272 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.356289 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.356314 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.356331 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:40Z","lastTransitionTime":"2025-12-04T17:28:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.370615 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:40Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.384536 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:40Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.403683 4631 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:40Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.421521 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:40Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.438046 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:40Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.456300 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:40Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.458812 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.458867 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.458881 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.458907 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.458923 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:40Z","lastTransitionTime":"2025-12-04T17:28:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.478219 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:40Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.492912 4631 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:40Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.508335 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29a77294-77b9-4074-9cb9-e31985b67447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://712c54ce6d416f06964e2a50bf902d2c18b7bf713a9d67265bfbadc2fbf4ee6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96709e063c20b40adae03f18364ec6297f144e6454670c5ab760f150837e165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-88qcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:40Z is after 2025-08-24T17:21:41Z" Dec 04 
17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.526250 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:40Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.562697 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.562770 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.562781 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.562798 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.562810 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:40Z","lastTransitionTime":"2025-12-04T17:28:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.665294 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.665347 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.665362 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.665400 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.665416 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:40Z","lastTransitionTime":"2025-12-04T17:28:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.768741 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.768813 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.768827 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.768850 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.768867 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:40Z","lastTransitionTime":"2025-12-04T17:28:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.871977 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.872045 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.872058 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.872122 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.872139 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:40Z","lastTransitionTime":"2025-12-04T17:28:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.974647 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.974896 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.974908 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.974930 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:40 crc kubenswrapper[4631]: I1204 17:28:40.974944 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:40Z","lastTransitionTime":"2025-12-04T17:28:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.077337 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.077425 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.077441 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.077465 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.077482 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:41Z","lastTransitionTime":"2025-12-04T17:28:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.181144 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.181230 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.181254 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.181282 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.181303 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:41Z","lastTransitionTime":"2025-12-04T17:28:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.284995 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.285306 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.285364 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.285507 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.285564 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:41Z","lastTransitionTime":"2025-12-04T17:28:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.388579 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.388673 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.388696 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.388724 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.388743 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:41Z","lastTransitionTime":"2025-12-04T17:28:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.492041 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.492099 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.492113 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.492133 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.492147 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:41Z","lastTransitionTime":"2025-12-04T17:28:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.595105 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.595182 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.595198 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.595223 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.595243 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:41Z","lastTransitionTime":"2025-12-04T17:28:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.698694 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.698771 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.698790 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.698824 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.698844 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:41Z","lastTransitionTime":"2025-12-04T17:28:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.801919 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.801956 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.801969 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.801988 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.802001 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:41Z","lastTransitionTime":"2025-12-04T17:28:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.905268 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.905320 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.905332 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.905352 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:41 crc kubenswrapper[4631]: I1204 17:28:41.905365 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:41Z","lastTransitionTime":"2025-12-04T17:28:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.008637 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.008721 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.008734 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.008751 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.008765 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:42Z","lastTransitionTime":"2025-12-04T17:28:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.111276 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.111335 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.111351 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.111395 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.111412 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:42Z","lastTransitionTime":"2025-12-04T17:28:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.213550 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.213642 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.213659 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.213685 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.213704 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:42Z","lastTransitionTime":"2025-12-04T17:28:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.238631 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.238673 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.238726 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:28:42 crc kubenswrapper[4631]: E1204 17:28:42.238863 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.238945 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:28:42 crc kubenswrapper[4631]: E1204 17:28:42.239056 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:28:42 crc kubenswrapper[4631]: E1204 17:28:42.239157 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:28:42 crc kubenswrapper[4631]: E1204 17:28:42.239234 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.316064 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.316104 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.316111 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.316124 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.316134 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:42Z","lastTransitionTime":"2025-12-04T17:28:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.418924 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.418976 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.418989 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.419004 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.419016 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:42Z","lastTransitionTime":"2025-12-04T17:28:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.521553 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.521601 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.521609 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.521652 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.521663 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:42Z","lastTransitionTime":"2025-12-04T17:28:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.624785 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.624906 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.624922 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.624944 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.624957 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:42Z","lastTransitionTime":"2025-12-04T17:28:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.728568 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.728652 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.728673 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.729295 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.729339 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:42Z","lastTransitionTime":"2025-12-04T17:28:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.832308 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.832355 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.832389 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.832415 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.832430 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:42Z","lastTransitionTime":"2025-12-04T17:28:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.936921 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.936987 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.937001 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.937020 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:42 crc kubenswrapper[4631]: I1204 17:28:42.937035 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:42Z","lastTransitionTime":"2025-12-04T17:28:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.039984 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.040037 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.040047 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.040065 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.040076 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:43Z","lastTransitionTime":"2025-12-04T17:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.143748 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.143803 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.143819 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.143880 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.143901 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:43Z","lastTransitionTime":"2025-12-04T17:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.247220 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.247259 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.247271 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.247287 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.247298 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:43Z","lastTransitionTime":"2025-12-04T17:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.350096 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.350154 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.350173 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.350201 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.350221 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:43Z","lastTransitionTime":"2025-12-04T17:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.454715 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.454765 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.454780 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.454796 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.454807 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:43Z","lastTransitionTime":"2025-12-04T17:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.558436 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.558502 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.558519 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.558541 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.558557 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:43Z","lastTransitionTime":"2025-12-04T17:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.661355 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.661423 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.661434 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.661450 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.661459 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:43Z","lastTransitionTime":"2025-12-04T17:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.763630 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.763669 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.763680 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.763698 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.763709 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:43Z","lastTransitionTime":"2025-12-04T17:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.852839 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.852910 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.852931 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.852949 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.852960 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:43Z","lastTransitionTime":"2025-12-04T17:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:43 crc kubenswrapper[4631]: E1204 17:28:43.866336 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:43Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.871045 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.871088 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.871099 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.871116 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.871127 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:43Z","lastTransitionTime":"2025-12-04T17:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:43 crc kubenswrapper[4631]: E1204 17:28:43.883654 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:43Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.888330 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.888408 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.888430 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.888452 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.888469 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:43Z","lastTransitionTime":"2025-12-04T17:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:43 crc kubenswrapper[4631]: E1204 17:28:43.902864 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:43Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.907425 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.907497 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.907516 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.907537 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.907551 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:43Z","lastTransitionTime":"2025-12-04T17:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:43 crc kubenswrapper[4631]: E1204 17:28:43.923549 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:43Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.929300 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.929412 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.929432 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.929459 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.929480 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:43Z","lastTransitionTime":"2025-12-04T17:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:43 crc kubenswrapper[4631]: E1204 17:28:43.942992 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:43Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:43 crc kubenswrapper[4631]: E1204 17:28:43.943257 4631 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.945052 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.945101 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.945112 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.945132 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:43 crc kubenswrapper[4631]: I1204 17:28:43.945146 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:43Z","lastTransitionTime":"2025-12-04T17:28:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.049092 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.049172 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.049191 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.049224 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.049246 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:44Z","lastTransitionTime":"2025-12-04T17:28:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.152807 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.152868 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.152879 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.152899 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.152911 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:44Z","lastTransitionTime":"2025-12-04T17:28:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.238785 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.238927 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.238976 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:28:44 crc kubenswrapper[4631]: E1204 17:28:44.239009 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.239048 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:28:44 crc kubenswrapper[4631]: E1204 17:28:44.239150 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:28:44 crc kubenswrapper[4631]: E1204 17:28:44.239331 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:28:44 crc kubenswrapper[4631]: E1204 17:28:44.239490 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.255689 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.255768 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.255795 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.255832 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.255859 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:44Z","lastTransitionTime":"2025-12-04T17:28:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.358121 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.358617 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.358752 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.358879 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.359005 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:44Z","lastTransitionTime":"2025-12-04T17:28:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.462602 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.462686 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.462706 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.462738 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.462760 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:44Z","lastTransitionTime":"2025-12-04T17:28:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.566739 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.566843 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.566855 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.566875 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.566886 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:44Z","lastTransitionTime":"2025-12-04T17:28:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.670443 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.671068 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.671205 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.671349 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.671528 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:44Z","lastTransitionTime":"2025-12-04T17:28:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.775201 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.776026 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.776168 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.776315 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.776472 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:44Z","lastTransitionTime":"2025-12-04T17:28:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.879760 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.879821 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.879831 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.879852 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.879863 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:44Z","lastTransitionTime":"2025-12-04T17:28:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.983090 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.983204 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.983216 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.983234 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:44 crc kubenswrapper[4631]: I1204 17:28:44.983248 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:44Z","lastTransitionTime":"2025-12-04T17:28:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.085566 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.085605 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.085614 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.085631 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.085644 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:45Z","lastTransitionTime":"2025-12-04T17:28:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.188529 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.188584 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.188595 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.188616 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.188629 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:45Z","lastTransitionTime":"2025-12-04T17:28:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.292367 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.292475 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.292499 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.292597 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.292710 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:45Z","lastTransitionTime":"2025-12-04T17:28:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.396578 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.396635 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.396649 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.396673 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.396690 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:45Z","lastTransitionTime":"2025-12-04T17:28:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.500691 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.500737 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.500749 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.500768 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.500782 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:45Z","lastTransitionTime":"2025-12-04T17:28:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.603804 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.603863 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.603876 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.603894 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.603904 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:45Z","lastTransitionTime":"2025-12-04T17:28:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.706885 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.706947 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.706961 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.706984 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.706999 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:45Z","lastTransitionTime":"2025-12-04T17:28:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.810577 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.810671 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.810686 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.810712 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.810730 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:45Z","lastTransitionTime":"2025-12-04T17:28:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.913235 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.913291 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.913306 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.913323 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:45 crc kubenswrapper[4631]: I1204 17:28:45.913335 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:45Z","lastTransitionTime":"2025-12-04T17:28:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.015315 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.015357 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.015380 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.015399 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.015417 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:46Z","lastTransitionTime":"2025-12-04T17:28:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.118656 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.118750 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.118823 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.118862 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.118891 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:46Z","lastTransitionTime":"2025-12-04T17:28:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.221226 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.221301 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.221335 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.221367 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.221418 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:46Z","lastTransitionTime":"2025-12-04T17:28:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.238939 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.238939 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:28:46 crc kubenswrapper[4631]: E1204 17:28:46.239160 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.238946 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:28:46 crc kubenswrapper[4631]: E1204 17:28:46.239432 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.239466 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:28:46 crc kubenswrapper[4631]: E1204 17:28:46.239605 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:28:46 crc kubenswrapper[4631]: E1204 17:28:46.239736 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.325327 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.325447 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.325472 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.325512 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.325537 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:46Z","lastTransitionTime":"2025-12-04T17:28:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.428131 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.428177 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.428188 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.428207 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.428218 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:46Z","lastTransitionTime":"2025-12-04T17:28:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.531473 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.531513 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.531523 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.531541 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.531553 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:46Z","lastTransitionTime":"2025-12-04T17:28:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.588155 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs\") pod \"network-metrics-daemon-8kcrj\" (UID: \"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\") " pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:28:46 crc kubenswrapper[4631]: E1204 17:28:46.588461 4631 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 17:28:46 crc kubenswrapper[4631]: E1204 17:28:46.588641 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs podName:86903bd1-674d-4fa2-b9d1-dbc8f347b72b nodeName:}" failed. No retries permitted until 2025-12-04 17:29:02.588596108 +0000 UTC m=+72.620838146 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs") pod "network-metrics-daemon-8kcrj" (UID: "86903bd1-674d-4fa2-b9d1-dbc8f347b72b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.634629 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.634680 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.634694 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.634720 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.634737 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:46Z","lastTransitionTime":"2025-12-04T17:28:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.738291 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.738367 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.738432 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.738468 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.738494 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:46Z","lastTransitionTime":"2025-12-04T17:28:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.842234 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.842294 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.842306 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.842323 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.842336 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:46Z","lastTransitionTime":"2025-12-04T17:28:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.946178 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.946285 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.946308 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.946342 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:46 crc kubenswrapper[4631]: I1204 17:28:46.946405 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:46Z","lastTransitionTime":"2025-12-04T17:28:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.050353 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.050453 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.050473 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.050503 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.050529 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:47Z","lastTransitionTime":"2025-12-04T17:28:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.153358 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.153751 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.153967 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.154087 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.154196 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:47Z","lastTransitionTime":"2025-12-04T17:28:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.259667 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.259958 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.260028 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.260100 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.260164 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:47Z","lastTransitionTime":"2025-12-04T17:28:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.363309 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.363756 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.363864 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.363983 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.364065 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:47Z","lastTransitionTime":"2025-12-04T17:28:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.466780 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.467128 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.467206 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.467273 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.467342 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:47Z","lastTransitionTime":"2025-12-04T17:28:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.570663 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.571095 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.571231 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.571340 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.571472 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:47Z","lastTransitionTime":"2025-12-04T17:28:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.674074 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.674133 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.674146 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.674168 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.674182 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:47Z","lastTransitionTime":"2025-12-04T17:28:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.776748 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.776795 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.776805 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.776821 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.776832 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:47Z","lastTransitionTime":"2025-12-04T17:28:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.879622 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.879733 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.879747 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.879766 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.879782 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:47Z","lastTransitionTime":"2025-12-04T17:28:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.983666 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.984150 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.984250 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.984415 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:47 crc kubenswrapper[4631]: I1204 17:28:47.984558 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:47Z","lastTransitionTime":"2025-12-04T17:28:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.018042 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:28:48 crc kubenswrapper[4631]: E1204 17:28:48.018165 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:29:20.018142226 +0000 UTC m=+90.050384224 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.087978 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.088334 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.088436 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.088545 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.088618 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:48Z","lastTransitionTime":"2025-12-04T17:28:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.119946 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:28:48 crc kubenswrapper[4631]: E1204 17:28:48.120231 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 17:28:48 crc kubenswrapper[4631]: E1204 17:28:48.120315 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.120252 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.120431 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.120475 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:28:48 crc kubenswrapper[4631]: E1204 17:28:48.120341 4631 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:48 crc kubenswrapper[4631]: E1204 17:28:48.120619 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 17:29:20.120592534 +0000 UTC m=+90.152834552 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:48 crc kubenswrapper[4631]: E1204 17:28:48.120655 4631 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 17:28:48 crc kubenswrapper[4631]: E1204 17:28:48.120684 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 17:28:48 crc kubenswrapper[4631]: E1204 17:28:48.120714 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 17:28:48 crc kubenswrapper[4631]: E1204 17:28:48.120721 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 17:29:20.120702227 +0000 UTC m=+90.152944435 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 17:28:48 crc kubenswrapper[4631]: E1204 17:28:48.120736 4631 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:48 crc kubenswrapper[4631]: E1204 17:28:48.120822 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 17:29:20.12079727 +0000 UTC m=+90.153039448 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:28:48 crc kubenswrapper[4631]: E1204 17:28:48.121008 4631 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 17:28:48 crc kubenswrapper[4631]: E1204 17:28:48.121117 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-12-04 17:29:20.121107559 +0000 UTC m=+90.153349557 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.191933 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.191991 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.192006 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.192024 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.192037 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:48Z","lastTransitionTime":"2025-12-04T17:28:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.238608 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.238660 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:28:48 crc kubenswrapper[4631]: E1204 17:28:48.238767 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:28:48 crc kubenswrapper[4631]: E1204 17:28:48.238841 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.238937 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.239158 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:28:48 crc kubenswrapper[4631]: E1204 17:28:48.239168 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:28:48 crc kubenswrapper[4631]: E1204 17:28:48.239560 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.295962 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.296005 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.296015 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.296030 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.296045 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:48Z","lastTransitionTime":"2025-12-04T17:28:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.398699 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.398744 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.398756 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.398769 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.398778 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:48Z","lastTransitionTime":"2025-12-04T17:28:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.502814 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.503314 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.503338 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.503367 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.503419 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:48Z","lastTransitionTime":"2025-12-04T17:28:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.605934 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.605974 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.605989 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.606013 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.606025 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:48Z","lastTransitionTime":"2025-12-04T17:28:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.708653 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.708687 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.708696 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.708712 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.708722 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:48Z","lastTransitionTime":"2025-12-04T17:28:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.811278 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.811342 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.811353 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.811388 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.811401 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:48Z","lastTransitionTime":"2025-12-04T17:28:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.913849 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.914469 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.914485 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.914501 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:48 crc kubenswrapper[4631]: I1204 17:28:48.914512 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:48Z","lastTransitionTime":"2025-12-04T17:28:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.017790 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.017841 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.017851 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.017867 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.017881 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:49Z","lastTransitionTime":"2025-12-04T17:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.120693 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.120725 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.120733 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.120746 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.120755 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:49Z","lastTransitionTime":"2025-12-04T17:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.223841 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.223887 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.223898 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.223916 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.223928 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:49Z","lastTransitionTime":"2025-12-04T17:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.326485 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.326523 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.326533 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.326549 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.326559 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:49Z","lastTransitionTime":"2025-12-04T17:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.429573 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.429616 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.429624 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.429638 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.429648 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:49Z","lastTransitionTime":"2025-12-04T17:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.532095 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.532130 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.532139 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.532154 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.532163 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:49Z","lastTransitionTime":"2025-12-04T17:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.634504 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.634579 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.634591 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.634609 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.634622 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:49Z","lastTransitionTime":"2025-12-04T17:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.736824 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.736863 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.736874 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.736894 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.736907 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:49Z","lastTransitionTime":"2025-12-04T17:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.839603 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.839666 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.839681 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.839703 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.839721 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:49Z","lastTransitionTime":"2025-12-04T17:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.944680 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.944744 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.944756 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.944775 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:49 crc kubenswrapper[4631]: I1204 17:28:49.944788 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:49Z","lastTransitionTime":"2025-12-04T17:28:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.047965 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.048014 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.048027 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.048049 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.048077 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:50Z","lastTransitionTime":"2025-12-04T17:28:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.152077 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.152129 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.152142 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.152165 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.152179 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:50Z","lastTransitionTime":"2025-12-04T17:28:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.238358 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:28:50 crc kubenswrapper[4631]: E1204 17:28:50.238499 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.238776 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.238840 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.238978 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:28:50 crc kubenswrapper[4631]: E1204 17:28:50.239570 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:28:50 crc kubenswrapper[4631]: E1204 17:28:50.239786 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:28:50 crc kubenswrapper[4631]: E1204 17:28:50.239854 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.254875 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.254920 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.254934 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.254957 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.254970 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:50Z","lastTransitionTime":"2025-12-04T17:28:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.256034 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:50Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.272028 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:50Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.286267 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:50Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.297727 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:50Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.314010 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-ku
bernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:50Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.328694 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{
\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:50Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.344202 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:50Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.357798 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.358093 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.358169 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.358235 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.358292 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:50Z","lastTransitionTime":"2025-12-04T17:28:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.364298 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:50Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.376812 4631 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:50Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.390796 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29a77294-77b9-4074-9cb9-e31985b67447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://712c54ce6d416f06964e2a50bf902d2c18b7bf713a9d67265bfbadc2fbf4ee6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96709e063c20b40adae03f18364ec6297f144e6454670c5ab760f150837e165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-88qcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:50Z is after 2025-08-24T17:21:41Z" Dec 04 
17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.407498 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:50Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.422977 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:50Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.448259 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:50Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.461471 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.461514 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.461525 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.461548 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.461564 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:50Z","lastTransitionTime":"2025-12-04T17:28:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.464518 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:50Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.483829 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a8fc0e9edc00e1ec794ac19f971843eac04082ac1887f600e9dc8ef5acf5c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a8fc0e9edc00e1ec794ac19f971843eac04082ac1887f600e9dc8ef5acf5c4c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"message\\\":\\\"d/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 17:28:32.456090 5968 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1204 17:28:32.456122 5968 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 17:28:32.456156 5968 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1204 17:28:32.456172 5968 factory.go:656] Stopping watch factory\\\\nI1204 17:28:32.456188 5968 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 17:28:32.456195 5968 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1204 17:28:32.456272 5968 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1204 17:28:32.502496 5968 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1204 17:28:32.502546 5968 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1204 17:28:32.502608 5968 ovnkube.go:599] Stopped ovnkube\\\\nI1204 17:28:32.502641 5968 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1204 17:28:32.502761 5968 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vpgzg_openshift-ovn-kubernetes(0d617abc-dc04-4807-b684-3640cde38e81)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:50Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.495441 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8kcrj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8kcrj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:50Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.564957 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.565365 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.565498 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.565598 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.565692 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:50Z","lastTransitionTime":"2025-12-04T17:28:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.668552 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.668910 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.668978 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.669066 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.669154 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:50Z","lastTransitionTime":"2025-12-04T17:28:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.772043 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.772439 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.772622 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.772848 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.773048 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:50Z","lastTransitionTime":"2025-12-04T17:28:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.877361 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.877484 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.877504 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.877541 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.877563 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:50Z","lastTransitionTime":"2025-12-04T17:28:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.982291 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.982412 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.982432 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.982491 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:50 crc kubenswrapper[4631]: I1204 17:28:50.982518 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:50Z","lastTransitionTime":"2025-12-04T17:28:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.086517 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.086593 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.086615 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.086645 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.086665 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:51Z","lastTransitionTime":"2025-12-04T17:28:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.190851 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.191324 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.191708 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.191944 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.192164 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:51Z","lastTransitionTime":"2025-12-04T17:28:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.240543 4631 scope.go:117] "RemoveContainer" containerID="8a8fc0e9edc00e1ec794ac19f971843eac04082ac1887f600e9dc8ef5acf5c4c" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.296292 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.296362 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.296396 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.296416 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.296459 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:51Z","lastTransitionTime":"2025-12-04T17:28:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.399278 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.399861 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.399870 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.399887 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.399902 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:51Z","lastTransitionTime":"2025-12-04T17:28:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.502954 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.503063 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.503089 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.503120 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.503144 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:51Z","lastTransitionTime":"2025-12-04T17:28:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.606875 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.606948 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.606969 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.606996 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.607014 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:51Z","lastTransitionTime":"2025-12-04T17:28:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.710449 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.710687 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.710752 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.710825 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.710905 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:51Z","lastTransitionTime":"2025-12-04T17:28:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.813223 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.813286 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.813304 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.813331 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.813350 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:51Z","lastTransitionTime":"2025-12-04T17:28:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.915806 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.916092 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.916169 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.916271 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:51 crc kubenswrapper[4631]: I1204 17:28:51.916349 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:51Z","lastTransitionTime":"2025-12-04T17:28:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.019424 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.019481 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.019494 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.019514 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.019527 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:52Z","lastTransitionTime":"2025-12-04T17:28:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.122423 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.122485 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.122499 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.122521 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.122536 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:52Z","lastTransitionTime":"2025-12-04T17:28:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.225839 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.225879 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.225889 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.225937 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.225948 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:52Z","lastTransitionTime":"2025-12-04T17:28:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.241561 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:28:52 crc kubenswrapper[4631]: E1204 17:28:52.241714 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.241946 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:52 crc kubenswrapper[4631]: E1204 17:28:52.241995 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.242113 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:28:52 crc kubenswrapper[4631]: E1204 17:28:52.242171 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.242282 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:28:52 crc kubenswrapper[4631]: E1204 17:28:52.242344 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.330569 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.330618 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.330628 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.330648 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.330660 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:52Z","lastTransitionTime":"2025-12-04T17:28:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.432981 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.433023 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.433032 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.433046 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.433057 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:52Z","lastTransitionTime":"2025-12-04T17:28:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.535469 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.535499 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.535508 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.535520 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.535529 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:52Z","lastTransitionTime":"2025-12-04T17:28:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.638040 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.638091 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.638102 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.638146 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.638158 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:52Z","lastTransitionTime":"2025-12-04T17:28:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.740238 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.740280 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.740313 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.740331 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.740343 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:52Z","lastTransitionTime":"2025-12-04T17:28:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.843072 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.843111 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.843123 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.843145 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.843160 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:52Z","lastTransitionTime":"2025-12-04T17:28:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.946022 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.946068 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.946079 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.946096 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:52 crc kubenswrapper[4631]: I1204 17:28:52.946107 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:52Z","lastTransitionTime":"2025-12-04T17:28:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.031645 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vpgzg_0d617abc-dc04-4807-b684-3640cde38e81/ovnkube-controller/1.log" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.035750 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerStarted","Data":"440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866"} Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.037010 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.063943 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.063992 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.064004 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.064020 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.064029 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:53Z","lastTransitionTime":"2025-12-04T17:28:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.068870 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.070119 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.090056 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.090324 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.102864 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.121636 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.135171 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.149659 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29a77294-77b9-4074-9cb9-e31985b67447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://712c54ce6d416f06964e2a50bf902d2c18b7bf713a9d67265bfbadc2fbf4ee6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96709e063c20b40adae03f18364ec6297f144e6454670c5ab760f150837e165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-88qcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 
17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.167214 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.167255 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.167281 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.167302 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.167315 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:53Z","lastTransitionTime":"2025-12-04T17:28:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.172426 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://440aad2f92b9210c3a0153d72b3776a8a0010d3c
a30b844c6740219dd8f5a866\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a8fc0e9edc00e1ec794ac19f971843eac04082ac1887f600e9dc8ef5acf5c4c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"message\\\":\\\"d/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 17:28:32.456090 5968 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1204 17:28:32.456122 5968 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 17:28:32.456156 5968 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1204 17:28:32.456172 5968 factory.go:656] Stopping watch factory\\\\nI1204 17:28:32.456188 5968 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 17:28:32.456195 5968 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1204 17:28:32.456272 5968 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1204 17:28:32.502496 5968 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1204 17:28:32.502546 5968 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1204 17:28:32.502608 5968 ovnkube.go:599] Stopped ovnkube\\\\nI1204 17:28:32.502641 5968 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1204 17:28:32.502761 5968 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.193030 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8kcrj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8kcrj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.233599 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.269086 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.270177 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.270221 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.270232 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.270252 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.270272 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:53Z","lastTransitionTime":"2025-12-04T17:28:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.282944 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.294773 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.306058 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.318794 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.330581 4631 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 
17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.346455 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.360614 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\
"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.372876 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.372914 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.372925 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.372941 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.372951 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:53Z","lastTransitionTime":"2025-12-04T17:28:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.375627 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.389853 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf70493c-4094-4783-939d-ac61051c83ac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2232c209adbed3161e31474c06d86e5184985a392f0e7f7c115889bf80ad5266\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17ca1bf820b90d0dcab27c3f59301d671ae001940de1b634174e371a77a5208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b24dbe6d9b284dd49778fcde6a5d7b665e2fca8181a7cb11c2d14028da9fb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.405971 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.423593 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.434323 4631 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 
17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.443916 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29a77294-77b9-4074-9cb9-e31985b67447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://712c54ce6d416f06964e2a50bf902d2c18b7bf713a9d67265bfbadc2fbf4ee6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96709e063c20b40adae03f18364ec6297f144e6454670c5ab760f150837e165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-88qcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.456448 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-1
2-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.468454 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.475330 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.475384 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.475396 4631 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.475415 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.475426 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:53Z","lastTransitionTime":"2025-12-04T17:28:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.480584 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.496208 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.507304 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.516651 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.534981 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a8fc0e9edc00e1ec794ac19f971843eac04082ac1887f600e9dc8ef5acf5c4c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"message\\\":\\\"d/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 17:28:32.456090 5968 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1204 17:28:32.456122 5968 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 17:28:32.456156 5968 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1204 17:28:32.456172 5968 factory.go:656] Stopping watch factory\\\\nI1204 17:28:32.456188 5968 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 17:28:32.456195 5968 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1204 17:28:32.456272 5968 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1204 17:28:32.502496 5968 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1204 17:28:32.502546 5968 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1204 17:28:32.502608 5968 ovnkube.go:599] Stopped ovnkube\\\\nI1204 17:28:32.502641 5968 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1204 17:28:32.502761 5968 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.546000 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8kcrj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8kcrj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.561325 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.574051 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:53Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.578033 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.578100 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.578114 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.578135 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.578152 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:53Z","lastTransitionTime":"2025-12-04T17:28:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.680807 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.680872 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.680883 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.680901 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.680914 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:53Z","lastTransitionTime":"2025-12-04T17:28:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.784090 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.784272 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.784286 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.784300 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.784310 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:53Z","lastTransitionTime":"2025-12-04T17:28:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.886673 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.886711 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.886720 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.886734 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.886743 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:53Z","lastTransitionTime":"2025-12-04T17:28:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.989962 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.989998 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.990008 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.990023 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:53 crc kubenswrapper[4631]: I1204 17:28:53.990036 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:53Z","lastTransitionTime":"2025-12-04T17:28:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.042240 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vpgzg_0d617abc-dc04-4807-b684-3640cde38e81/ovnkube-controller/2.log" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.043231 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vpgzg_0d617abc-dc04-4807-b684-3640cde38e81/ovnkube-controller/1.log" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.047130 4631 generic.go:334] "Generic (PLEG): container finished" podID="0d617abc-dc04-4807-b684-3640cde38e81" containerID="440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866" exitCode=1 Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.047503 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerDied","Data":"440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866"} Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.047632 4631 scope.go:117] "RemoveContainer" containerID="8a8fc0e9edc00e1ec794ac19f971843eac04082ac1887f600e9dc8ef5acf5c4c" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.048273 4631 scope.go:117] "RemoveContainer" containerID="440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866" Dec 04 17:28:54 crc kubenswrapper[4631]: E1204 17:28:54.048517 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-vpgzg_openshift-ovn-kubernetes(0d617abc-dc04-4807-b684-3640cde38e81)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" podUID="0d617abc-dc04-4807-b684-3640cde38e81" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.064192 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:54Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.075205 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:54Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.090411 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29a77294-77b9-4074-9cb9-e31985b67447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://712c54ce6d416f06964e2a50bf902d2c18b7bf713a9d67265bfbadc2fbf4ee6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96709e063c20b40adae03f18364ec6297f144e6454670c5ab760f150837e165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-88qcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:54Z is after 2025-08-24T17:21:41Z" Dec 04 
17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.093454 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.093512 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.093526 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.093550 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.093576 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:54Z","lastTransitionTime":"2025-12-04T17:28:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.106593 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\"
:\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:54Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.123038 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:54Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.135862 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:54Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.148132 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:54Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.169913 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a8fc0e9edc00e1ec794ac19f971843eac04082ac1887f600e9dc8ef5acf5c4c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"message\\\":\\\"d/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 17:28:32.456090 5968 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1204 17:28:32.456122 5968 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1204 17:28:32.456156 5968 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1204 17:28:32.456172 5968 factory.go:656] Stopping watch factory\\\\nI1204 17:28:32.456188 5968 handler.go:208] Removed *v1.Node event handler 2\\\\nI1204 17:28:32.456195 5968 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1204 17:28:32.456272 5968 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1204 17:28:32.502496 5968 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1204 17:28:32.502546 5968 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1204 17:28:32.502608 5968 ovnkube.go:599] Stopped ovnkube\\\\nI1204 17:28:32.502641 5968 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1204 17:28:32.502761 5968 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"message\\\":\\\"perations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/api]} name:Service_openshift-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.37:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {88e20c31-5b8d-4d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 17:28:52.896842 6203 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/api]} name:Service_openshift-apiserver/api_TCP_cluster options:{GoMap:map[event:false 
hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.37:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {88e20c31-5b8d-4d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 17:28:52.895898 6203 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1204 17:28:52.897121 6203 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\
\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:54Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.184045 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8kcrj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8kcrj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:54Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.196035 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.196096 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.196109 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.196147 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.196160 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:54Z","lastTransitionTime":"2025-12-04T17:28:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.200335 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:54Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.216644 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:54Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.233294 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:54Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.238510 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.238538 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.238548 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.238595 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:54 crc kubenswrapper[4631]: E1204 17:28:54.238688 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:28:54 crc kubenswrapper[4631]: E1204 17:28:54.238820 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:28:54 crc kubenswrapper[4631]: E1204 17:28:54.238930 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:28:54 crc kubenswrapper[4631]: E1204 17:28:54.239050 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.249154 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:54Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.260820 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:54Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.273229 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"
quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:54Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.290547 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf70493c-4094-4783-939d-ac61051c83ac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2232c209adbed3161e31474c06d86e5184985a392f0e7f7c115889bf80ad5266\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17ca1bf820b90d0dcab27c3f59301d671ae001940de1b634174e371a77a5208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b24dbe6d9b284dd49778fcde6a5d7b665e2fca8181a7cb11c2d14028da9fb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:54Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.298957 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.299004 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.299017 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.299040 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.299054 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:54Z","lastTransitionTime":"2025-12-04T17:28:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.304223 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:54Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.331643 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.331694 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.331713 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.331740 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.331758 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:54Z","lastTransitionTime":"2025-12-04T17:28:54Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:54 crc kubenswrapper[4631]: E1204 17:28:54.347782 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:54Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.352723 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.352780 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.352792 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.352817 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.352829 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:54Z","lastTransitionTime":"2025-12-04T17:28:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:54 crc kubenswrapper[4631]: E1204 17:28:54.368783 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:54Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.372938 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.372966 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.372979 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.372997 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.373011 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:54Z","lastTransitionTime":"2025-12-04T17:28:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:54 crc kubenswrapper[4631]: E1204 17:28:54.386957 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:54Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.391301 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.391332 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.391342 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.391361 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.391388 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:54Z","lastTransitionTime":"2025-12-04T17:28:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:54 crc kubenswrapper[4631]: E1204 17:28:54.406444 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:54Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.411741 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.411780 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.411788 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.411804 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.411814 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:54Z","lastTransitionTime":"2025-12-04T17:28:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:54 crc kubenswrapper[4631]: E1204 17:28:54.428419 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:54Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:54 crc kubenswrapper[4631]: E1204 17:28:54.429033 4631 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.432925 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.433124 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.433215 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.433309 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.433498 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:54Z","lastTransitionTime":"2025-12-04T17:28:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.536023 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.536310 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.536387 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.536492 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.536592 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:54Z","lastTransitionTime":"2025-12-04T17:28:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.639880 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.639919 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.639933 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.639953 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.639966 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:54Z","lastTransitionTime":"2025-12-04T17:28:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.742735 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.742791 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.742803 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.742824 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.742839 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:54Z","lastTransitionTime":"2025-12-04T17:28:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.846028 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.846072 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.846081 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.846098 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.846108 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:54Z","lastTransitionTime":"2025-12-04T17:28:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.949333 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.949417 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.949434 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.949453 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:54 crc kubenswrapper[4631]: I1204 17:28:54.949465 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:54Z","lastTransitionTime":"2025-12-04T17:28:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.051510 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.051563 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.051577 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.051597 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.051611 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:55Z","lastTransitionTime":"2025-12-04T17:28:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.053966 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vpgzg_0d617abc-dc04-4807-b684-3640cde38e81/ovnkube-controller/2.log" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.057617 4631 scope.go:117] "RemoveContainer" containerID="440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866" Dec 04 17:28:55 crc kubenswrapper[4631]: E1204 17:28:55.057919 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-vpgzg_openshift-ovn-kubernetes(0d617abc-dc04-4807-b684-3640cde38e81)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" podUID="0d617abc-dc04-4807-b684-3640cde38e81" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.072760 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:55Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.085557 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:55Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.098596 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29a77294-77b9-4074-9cb9-e31985b67447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://712c54ce6d416f06964e2a50bf902d2c18b7bf713a9d67265bfbadc2fbf4ee6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96709e063c20b40adae03f18364ec6297f144e6454670c5ab760f150837e165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-88qcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:55Z is after 2025-08-24T17:21:41Z" Dec 04 
17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.114050 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:55Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.135503 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:55Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.148087 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:55Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.154799 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.154852 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.154866 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.154887 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.154922 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:55Z","lastTransitionTime":"2025-12-04T17:28:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.158568 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:55Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.175546 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"message\\\":\\\"perations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/api]} name:Service_openshift-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.37:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {88e20c31-5b8d-4d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 17:28:52.896842 6203 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/api]} name:Service_openshift-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.37:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {88e20c31-5b8d-4d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 17:28:52.895898 6203 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1204 17:28:52.897121 6203 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vpgzg_openshift-ovn-kubernetes(0d617abc-dc04-4807-b684-3640cde38e81)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:55Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.185709 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8kcrj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8kcrj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:55Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.197037 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:55Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.211253 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:55Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.225306 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:55Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.237892 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:55Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.252136 4631 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:55Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.263963 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.264002 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.264034 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.264055 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.264072 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:55Z","lastTransitionTime":"2025-12-04T17:28:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.268344 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a
\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:55Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.282394 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf70493c-4094-4783-939d-ac61051c83ac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2232c209adbed3161e31474c06d86e5184985a392f0e7f7c115889bf80ad5266\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17ca1bf820b90d0dcab27c3f59301d671ae001940de1b634174e371a77a5208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b24dbe6d9b284dd49778fcde6a5d7b665e2fca8181a7cb11c2d14028da9fb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:55Z is after 2025-08-24T17:21:41Z" Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.295909 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:28:55Z is after 2025-08-24T17:21:41Z"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.366629 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.366707 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.366718 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.366739 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.366768 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:55Z","lastTransitionTime":"2025-12-04T17:28:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.469329 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.469389 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.469403 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.469420 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.469432 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:55Z","lastTransitionTime":"2025-12-04T17:28:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.572310 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.572408 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.572423 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.572447 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.572464 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:55Z","lastTransitionTime":"2025-12-04T17:28:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.675664 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.675724 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.675735 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.675750 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.675762 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:55Z","lastTransitionTime":"2025-12-04T17:28:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.778792 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.778870 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.778883 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.778903 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.778916 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:55Z","lastTransitionTime":"2025-12-04T17:28:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.898951 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.899000 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.899010 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.899029 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:55 crc kubenswrapper[4631]: I1204 17:28:55.899039 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:55Z","lastTransitionTime":"2025-12-04T17:28:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.001709 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.001771 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.001785 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.001806 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.001819 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:56Z","lastTransitionTime":"2025-12-04T17:28:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.104959 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.105003 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.105012 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.105028 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.105042 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:56Z","lastTransitionTime":"2025-12-04T17:28:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.208152 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.208208 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.208218 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.208236 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.208260 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:56Z","lastTransitionTime":"2025-12-04T17:28:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.239280 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.239320 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.239439 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.239489 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 17:28:56 crc kubenswrapper[4631]: E1204 17:28:56.239453 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b"
Dec 04 17:28:56 crc kubenswrapper[4631]: E1204 17:28:56.239571 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 04 17:28:56 crc kubenswrapper[4631]: E1204 17:28:56.239735 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 04 17:28:56 crc kubenswrapper[4631]: E1204 17:28:56.239814 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.311187 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.311257 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.311270 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.311290 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.311306 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:56Z","lastTransitionTime":"2025-12-04T17:28:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.413685 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.413759 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.413770 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.413805 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.413816 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:56Z","lastTransitionTime":"2025-12-04T17:28:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.515986 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.516030 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.516041 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.516060 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.516072 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:56Z","lastTransitionTime":"2025-12-04T17:28:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.618087 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.618127 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.618136 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.618149 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.618159 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:56Z","lastTransitionTime":"2025-12-04T17:28:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.721397 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.721457 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.721474 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.721502 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.721522 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:56Z","lastTransitionTime":"2025-12-04T17:28:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.823526 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.823572 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.823586 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.823608 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.823621 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:56Z","lastTransitionTime":"2025-12-04T17:28:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.926362 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.926420 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.926430 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.926450 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:56 crc kubenswrapper[4631]: I1204 17:28:56.926459 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:56Z","lastTransitionTime":"2025-12-04T17:28:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.029553 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.029605 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.029620 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.029641 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.029654 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:57Z","lastTransitionTime":"2025-12-04T17:28:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.131873 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.131924 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.131936 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.131972 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.131987 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:57Z","lastTransitionTime":"2025-12-04T17:28:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.235064 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.235112 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.235127 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.235148 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.235164 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:57Z","lastTransitionTime":"2025-12-04T17:28:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.337827 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.337874 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.337886 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.337903 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.337916 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:57Z","lastTransitionTime":"2025-12-04T17:28:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.441688 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.441760 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.441773 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.441799 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.441816 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:57Z","lastTransitionTime":"2025-12-04T17:28:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.544739 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.544798 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.544808 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.544827 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.544837 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:57Z","lastTransitionTime":"2025-12-04T17:28:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.647674 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.647725 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.647746 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.647767 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.647781 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:57Z","lastTransitionTime":"2025-12-04T17:28:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.750184 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.750234 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.750246 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.750262 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.750274 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:57Z","lastTransitionTime":"2025-12-04T17:28:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.852324 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.852442 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.852456 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.852478 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.852490 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:57Z","lastTransitionTime":"2025-12-04T17:28:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.954798 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.954833 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.954842 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.954854 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:57 crc kubenswrapper[4631]: I1204 17:28:57.954866 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:57Z","lastTransitionTime":"2025-12-04T17:28:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.057833 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.057877 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.057890 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.057907 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.057921 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:58Z","lastTransitionTime":"2025-12-04T17:28:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.160810 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.160856 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.160865 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.160880 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.160889 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:58Z","lastTransitionTime":"2025-12-04T17:28:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.240559 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.240658 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.240765 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:28:58 crc kubenswrapper[4631]: E1204 17:28:58.240824 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:28:58 crc kubenswrapper[4631]: E1204 17:28:58.240680 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.240918 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:28:58 crc kubenswrapper[4631]: E1204 17:28:58.241112 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:28:58 crc kubenswrapper[4631]: E1204 17:28:58.241069 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.264548 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.264619 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.264636 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.264663 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.264681 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:58Z","lastTransitionTime":"2025-12-04T17:28:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.368502 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.368822 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.368919 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.369021 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.369191 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:58Z","lastTransitionTime":"2025-12-04T17:28:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.472273 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.472318 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.472334 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.472353 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.472366 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:58Z","lastTransitionTime":"2025-12-04T17:28:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.575502 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.575584 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.575597 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.575617 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.575631 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:58Z","lastTransitionTime":"2025-12-04T17:28:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.678088 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.678162 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.678178 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.678202 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.678222 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:58Z","lastTransitionTime":"2025-12-04T17:28:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.781061 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.781507 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.781619 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.781720 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.781809 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:58Z","lastTransitionTime":"2025-12-04T17:28:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.885336 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.885652 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.885766 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.885838 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.885895 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:58Z","lastTransitionTime":"2025-12-04T17:28:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.988726 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.988752 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.988760 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.988772 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:58 crc kubenswrapper[4631]: I1204 17:28:58.988780 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:58Z","lastTransitionTime":"2025-12-04T17:28:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.091674 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.091744 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.091758 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.091777 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.091789 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:59Z","lastTransitionTime":"2025-12-04T17:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.195110 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.195187 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.195212 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.195243 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.195267 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:59Z","lastTransitionTime":"2025-12-04T17:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.298431 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.298478 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.298490 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.298508 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.298520 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:59Z","lastTransitionTime":"2025-12-04T17:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.402017 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.402056 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.402068 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.402084 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.402094 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:59Z","lastTransitionTime":"2025-12-04T17:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.505672 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.505790 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.505805 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.505832 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.505849 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:59Z","lastTransitionTime":"2025-12-04T17:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.609396 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.609462 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.609477 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.609501 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.609519 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:59Z","lastTransitionTime":"2025-12-04T17:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.713052 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.713113 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.713132 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.713155 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.713175 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:59Z","lastTransitionTime":"2025-12-04T17:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.816265 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.816328 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.816341 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.816364 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.816405 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:59Z","lastTransitionTime":"2025-12-04T17:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.919469 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.919531 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.919546 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.919570 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:28:59 crc kubenswrapper[4631]: I1204 17:28:59.919585 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:28:59Z","lastTransitionTime":"2025-12-04T17:28:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.022976 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.023032 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.023044 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.023067 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.023085 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:00Z","lastTransitionTime":"2025-12-04T17:29:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.125967 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.126015 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.126057 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.126077 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.126091 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:00Z","lastTransitionTime":"2025-12-04T17:29:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.230455 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.230506 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.230517 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.230533 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.230545 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:00Z","lastTransitionTime":"2025-12-04T17:29:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.238757 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.238812 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.238812 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:29:00 crc kubenswrapper[4631]: E1204 17:29:00.238908 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.238983 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:29:00 crc kubenswrapper[4631]: E1204 17:29:00.239127 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:00 crc kubenswrapper[4631]: E1204 17:29:00.239118 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:29:00 crc kubenswrapper[4631]: E1204 17:29:00.239225 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.253357 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:00Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.273510 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"message\\\":\\\"perations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/api]} name:Service_openshift-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.37:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {88e20c31-5b8d-4d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 17:28:52.896842 6203 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/api]} name:Service_openshift-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.37:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {88e20c31-5b8d-4d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 17:28:52.895898 6203 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1204 17:28:52.897121 6203 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vpgzg_openshift-ovn-kubernetes(0d617abc-dc04-4807-b684-3640cde38e81)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:00Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.285486 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8kcrj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8kcrj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:00Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.297638 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:00Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.311594 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:00Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.325575 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:00Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.333138 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.333181 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.333197 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.333218 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.333232 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:00Z","lastTransitionTime":"2025-12-04T17:29:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.343134 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:00Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.356487 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:00Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.371286 4631 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:00Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.383799 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:00Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.395975 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf70493c-4094-4783-939d-ac61051c83ac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2232c209adbed3161e31474c06d86e5184985a392f0e7f7c115889bf80ad5266\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17ca1bf820b90d0dcab27c3f59301d671ae001940de1b634174e371a77a5208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b24dbe6d9b284dd49778fcde6a5d7b665e2fca8181a7cb11c2d14028da9fb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:00Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.408458 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:00Z is after 2025-08-24T17:21:41Z" Dec 04 
17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.422303 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:00Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.434926 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:00Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.436595 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.436629 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.436668 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.436684 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.436696 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:00Z","lastTransitionTime":"2025-12-04T17:29:00Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.448941 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29a77294-77b9-4074-9cb9-e31985b67447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://712c54ce6d416f06964e2a50bf902d2c18b7bf713a9d67265bfbadc2fbf4ee6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96709e063c20b40adae03f18364ec6297f144e6454670c5ab760f150837e165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17
:28:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-88qcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:00Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.460192 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355
e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:00Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.483753 4631 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:00Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.539010 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.539047 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.539056 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.539069 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.539078 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:00Z","lastTransitionTime":"2025-12-04T17:29:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.642043 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.642086 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.642099 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.642116 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.642129 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:00Z","lastTransitionTime":"2025-12-04T17:29:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.744502 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.744583 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.744596 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.744618 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.744630 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:00Z","lastTransitionTime":"2025-12-04T17:29:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.847665 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.847727 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.847737 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.847756 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.847768 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:00Z","lastTransitionTime":"2025-12-04T17:29:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.950963 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.951015 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.951028 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.951048 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:00 crc kubenswrapper[4631]: I1204 17:29:00.951061 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:00Z","lastTransitionTime":"2025-12-04T17:29:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.053105 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.053164 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.053176 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.053195 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.053206 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:01Z","lastTransitionTime":"2025-12-04T17:29:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.156754 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.156813 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.156831 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.156858 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.156880 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:01Z","lastTransitionTime":"2025-12-04T17:29:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.260420 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.260491 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.260510 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.260543 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.260571 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:01Z","lastTransitionTime":"2025-12-04T17:29:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.362516 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.362556 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.362565 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.362582 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.362591 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:01Z","lastTransitionTime":"2025-12-04T17:29:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.466082 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.466129 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.466137 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.466153 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.466166 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:01Z","lastTransitionTime":"2025-12-04T17:29:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.569154 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.569189 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.569197 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.569210 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.569223 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:01Z","lastTransitionTime":"2025-12-04T17:29:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.672210 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.672250 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.672261 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.672282 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.672294 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:01Z","lastTransitionTime":"2025-12-04T17:29:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.775611 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.775709 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.775735 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.775771 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.775804 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:01Z","lastTransitionTime":"2025-12-04T17:29:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.878694 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.878968 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.879032 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.879102 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.879165 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:01Z","lastTransitionTime":"2025-12-04T17:29:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.982755 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.983078 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.983152 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.983221 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:01 crc kubenswrapper[4631]: I1204 17:29:01.983293 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:01Z","lastTransitionTime":"2025-12-04T17:29:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.088220 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.088255 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.088264 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.088279 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.088297 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:02Z","lastTransitionTime":"2025-12-04T17:29:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.190272 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.190311 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.190324 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.190340 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.190352 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:02Z","lastTransitionTime":"2025-12-04T17:29:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.238949 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.239003 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.239032 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.238968 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:29:02 crc kubenswrapper[4631]: E1204 17:29:02.239164 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:02 crc kubenswrapper[4631]: E1204 17:29:02.239241 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:02 crc kubenswrapper[4631]: E1204 17:29:02.239311 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:29:02 crc kubenswrapper[4631]: E1204 17:29:02.239348 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.292651 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.292689 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.292702 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.292723 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.292739 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:02Z","lastTransitionTime":"2025-12-04T17:29:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.395281 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.395324 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.395333 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.395392 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.395403 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:02Z","lastTransitionTime":"2025-12-04T17:29:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.498260 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.498326 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.498336 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.498349 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.498358 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:02Z","lastTransitionTime":"2025-12-04T17:29:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.601427 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.601468 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.601479 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.601500 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.601513 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:02Z","lastTransitionTime":"2025-12-04T17:29:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.682598 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs\") pod \"network-metrics-daemon-8kcrj\" (UID: \"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\") " pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:02 crc kubenswrapper[4631]: E1204 17:29:02.682735 4631 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 17:29:02 crc kubenswrapper[4631]: E1204 17:29:02.682803 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs podName:86903bd1-674d-4fa2-b9d1-dbc8f347b72b nodeName:}" failed. No retries permitted until 2025-12-04 17:29:34.682785122 +0000 UTC m=+104.715027120 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs") pod "network-metrics-daemon-8kcrj" (UID: "86903bd1-674d-4fa2-b9d1-dbc8f347b72b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.703900 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.703945 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.703954 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.703967 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.703976 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:02Z","lastTransitionTime":"2025-12-04T17:29:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.806852 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.806899 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.806912 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.806927 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.806938 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:02Z","lastTransitionTime":"2025-12-04T17:29:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.908979 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.909027 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.909039 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.909056 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:02 crc kubenswrapper[4631]: I1204 17:29:02.909069 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:02Z","lastTransitionTime":"2025-12-04T17:29:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.011337 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.011428 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.011441 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.011462 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.011474 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:03Z","lastTransitionTime":"2025-12-04T17:29:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.114725 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.115072 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.115211 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.115352 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.115525 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:03Z","lastTransitionTime":"2025-12-04T17:29:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.219299 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.219327 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.219335 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.219349 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.219358 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:03Z","lastTransitionTime":"2025-12-04T17:29:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.322099 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.322153 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.322169 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.322197 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.322222 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:03Z","lastTransitionTime":"2025-12-04T17:29:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.428185 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.428629 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.428869 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.429409 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.429607 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:03Z","lastTransitionTime":"2025-12-04T17:29:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.532116 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.532154 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.532164 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.532179 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.532190 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:03Z","lastTransitionTime":"2025-12-04T17:29:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.634413 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.634951 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.635103 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.635270 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.635460 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:03Z","lastTransitionTime":"2025-12-04T17:29:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.738547 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.738598 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.738610 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.738630 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.738644 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:03Z","lastTransitionTime":"2025-12-04T17:29:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.841182 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.841767 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.841851 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.841922 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.841990 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:03Z","lastTransitionTime":"2025-12-04T17:29:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.944995 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.945457 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.945718 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.945928 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:03 crc kubenswrapper[4631]: I1204 17:29:03.946191 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:03Z","lastTransitionTime":"2025-12-04T17:29:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.049509 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.049576 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.049600 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.049633 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.049654 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:04Z","lastTransitionTime":"2025-12-04T17:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.090591 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zllp2_03e821a0-13d4-417c-9e54-7073b08490db/kube-multus/0.log" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.090660 4631 generic.go:334] "Generic (PLEG): container finished" podID="03e821a0-13d4-417c-9e54-7073b08490db" containerID="690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa" exitCode=1 Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.090692 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zllp2" event={"ID":"03e821a0-13d4-417c-9e54-7073b08490db","Type":"ContainerDied","Data":"690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa"} Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.091073 4631 scope.go:117] "RemoveContainer" containerID="690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.107782 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:29:03Z\\\",\\\"message\\\":\\\"2025-12-04T17:28:18+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7912288c-7bc7-4aeb-8144-7db1229d544b\\\\n2025-12-04T17:28:18+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7912288c-7bc7-4aeb-8144-7db1229d544b to /host/opt/cni/bin/\\\\n2025-12-04T17:28:18Z [verbose] multus-daemon started\\\\n2025-12-04T17:28:18Z [verbose] Readiness Indicator file check\\\\n2025-12-04T17:29:03Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:04Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.130873 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:04Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.146866 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf70493c-4094-4783-939d-ac61051c83ac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2232c209adbed3161e31474c06d86e5184985a392f0e7f7c115889bf80ad5266\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17ca1bf820b90d0dcab27c3f59301d671ae001940de1b634174e371a77a5208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b24dbe6d9b284dd49778fcde6a5d7b665e2fca8181a7cb11c2d14028da9fb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:04Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.152683 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.152735 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.152744 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.152759 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.152769 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:04Z","lastTransitionTime":"2025-12-04T17:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.160701 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:04Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.178583 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:04Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.192192 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:04Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.210223 4631 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29a77294-77b9-4074-9cb9-e31985b67447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://712c54ce6d416f06964e2a50bf902d2c18b7bf713a9d67265bfbadc2fbf4ee6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96709e063c20b40adae03f18364ec6297f144e6454670c5ab760f150837e165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-88qcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:04Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.236466 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:04Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.238578 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.238669 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.238700 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:29:04 crc kubenswrapper[4631]: E1204 17:29:04.238800 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:29:04 crc kubenswrapper[4631]: E1204 17:29:04.238989 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:04 crc kubenswrapper[4631]: E1204 17:29:04.239166 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.239493 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:29:04 crc kubenswrapper[4631]: E1204 17:29:04.239607 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.255200 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:04Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.256036 4631 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.256172 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.256301 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.256394 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.256492 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:04Z","lastTransitionTime":"2025-12-04T17:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.270540 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:04Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.290053 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312c
e4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var
/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:04Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.302327 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:04Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.314882 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:04Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.337790 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"message\\\":\\\"perations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/api]} name:Service_openshift-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.37:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {88e20c31-5b8d-4d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 17:28:52.896842 6203 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/api]} name:Service_openshift-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.37:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {88e20c31-5b8d-4d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 17:28:52.895898 6203 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1204 17:28:52.897121 6203 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vpgzg_openshift-ovn-kubernetes(0d617abc-dc04-4807-b684-3640cde38e81)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:04Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.350797 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8kcrj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8kcrj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:04Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.359842 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.359888 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.359902 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.359924 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.359936 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:04Z","lastTransitionTime":"2025-12-04T17:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.366128 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:04Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.379967 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:04Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.463142 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.463207 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.463226 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.463251 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.463268 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:04Z","lastTransitionTime":"2025-12-04T17:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.566908 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.566962 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.566977 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.567003 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.567020 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:04Z","lastTransitionTime":"2025-12-04T17:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.670075 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.670184 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.670204 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.670259 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.670276 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:04Z","lastTransitionTime":"2025-12-04T17:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.675434 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.675518 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.675537 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.675556 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.675602 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:04Z","lastTransitionTime":"2025-12-04T17:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:04 crc kubenswrapper[4631]: E1204 17:29:04.688240 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:04Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.695759 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.695847 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.695898 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.695924 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.695940 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:04Z","lastTransitionTime":"2025-12-04T17:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:04 crc kubenswrapper[4631]: E1204 17:29:04.709449 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:04Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.713569 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.713610 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.713644 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.713660 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.713672 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:04Z","lastTransitionTime":"2025-12-04T17:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:04 crc kubenswrapper[4631]: E1204 17:29:04.728066 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:04Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.733593 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.733851 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.734051 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.734226 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.734418 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:04Z","lastTransitionTime":"2025-12-04T17:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:04 crc kubenswrapper[4631]: E1204 17:29:04.749829 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:04Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.754756 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.754865 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.754890 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.754947 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.754967 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:04Z","lastTransitionTime":"2025-12-04T17:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:04 crc kubenswrapper[4631]: E1204 17:29:04.780838 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:04Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:04 crc kubenswrapper[4631]: E1204 17:29:04.781294 4631 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.784669 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.784825 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.784911 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.785011 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.785105 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:04Z","lastTransitionTime":"2025-12-04T17:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.888876 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.888947 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.888964 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.888987 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.889005 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:04Z","lastTransitionTime":"2025-12-04T17:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.992108 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.992158 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.992168 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.992184 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:04 crc kubenswrapper[4631]: I1204 17:29:04.992193 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:04Z","lastTransitionTime":"2025-12-04T17:29:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.094648 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.095052 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.095256 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.095426 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.095825 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:05Z","lastTransitionTime":"2025-12-04T17:29:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.099020 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zllp2_03e821a0-13d4-417c-9e54-7073b08490db/kube-multus/0.log" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.099095 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zllp2" event={"ID":"03e821a0-13d4-417c-9e54-7073b08490db","Type":"ContainerStarted","Data":"efdd152e4738f125d721a6d044c0d96a378761e28d18f292c85706d0f3158f4f"} Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.114224 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:05Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.128787 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:05Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.140354 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:05Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.159540 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:05Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.170996 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:05Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.183108 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29a77294-77b9-4074-9cb9-e31985b67447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://712c54ce6d416f06964e2a50bf902d2c18b7bf713a9d67265bfbadc2fbf4ee6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96709e063c20b40adae03f18364ec6297f144e6454670c5ab760f150837e165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-88qcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:05Z is after 2025-08-24T17:21:41Z" Dec 04 
17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.196207 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:05Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.198576 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.198609 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.198621 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.198639 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.198649 4631 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:05Z","lastTransitionTime":"2025-12-04T17:29:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.226937 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\
":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"message\\\":\\\"perations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/api]} name:Service_openshift-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.37:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {88e20c31-5b8d-4d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 17:28:52.896842 6203 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/api]} name:Service_openshift-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.37:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {88e20c31-5b8d-4d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 17:28:52.895898 
6203 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1204 17:28:52.897121 6203 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-vpgzg_openshift-ovn-kubernetes(0d617abc-dc04-4807-b684-3640cde38e81)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/
var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:05Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.242139 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8kcrj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8kcrj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:05Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.259817 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:05Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.274340 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:05Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.289868 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:05Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.301356 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.301446 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.301459 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.301475 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.301487 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:05Z","lastTransitionTime":"2025-12-04T17:29:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.305215 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf70493c-4094-4783-939d-ac61051c83ac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2232c209adbed3161e31474c06d86e5184985a392f0e7f7c115889bf80ad5266\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17ca1bf820b90d0dcab27c3f59301d671ae001940de1b634174e371a77a5208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b24dbe6d9b284dd49778fcde6a5d7b665e2fca8181a7cb11c2d14028da9fb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-re
sources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:05Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.319172 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:05Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.334497 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:05Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.348279 4631 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:05Z is after 2025-08-24T17:21:41Z" Dec 04 
17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.363359 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efdd152e4738f125d721a6d044c0d96a378761e28d18f292c85706d0f3158f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:29:03Z\\\",\\\"message\\\":\\\"2025-12-04T17:28:18+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7912288c-7bc7-4aeb-8144-7db1229d544b\\\\n2025-12-04T17:28:18+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7912288c-7bc7-4aeb-8144-7db1229d544b to /host/opt/cni/bin/\\\\n2025-12-04T17:28:18Z [verbose] multus-daemon started\\\\n2025-12-04T17:28:18Z [verbose] Readiness Indicator file check\\\\n2025-12-04T17:29:03Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:29:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:05Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.404816 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.404869 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.404885 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.404906 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:05 crc kubenswrapper[4631]: I1204 17:29:05.404919 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:05Z","lastTransitionTime":"2025-12-04T17:29:05Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[log trimmed: the five-record "Recording event message for node" / "Node became not ready" heartbeat block repeats with only the timestamps changing, at roughly 100 ms intervals, from 17:29:05.508 through 17:29:06.131]
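Every "Failed to update status for pod" record above fails for the same reason: the pod.network-node-identity.openshift.io webhook's serving certificate expired on 2025-08-24T17:21:41Z while the node clock reads 2025-12-04. A minimal Go sketch of how one might confirm this from the node, assuming only the webhook address https://127.0.0.1:9743 quoted in the log (the program itself is illustrative, not part of any OpenShift tooling):

    package main

    import (
    	"crypto/tls"
    	"fmt"
    	"time"
    )

    func main() {
    	// 127.0.0.1:9743 is the webhook endpoint quoted in the log above.
    	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
    		// Skip chain verification: the point is to read the validity
    		// window off a certificate we already suspect is expired.
    		InsecureSkipVerify: true,
    	})
    	if err != nil {
    		fmt.Println("dial failed:", err)
    		return
    	}
    	defer conn.Close()
    	certs := conn.ConnectionState().PeerCertificates
    	if len(certs) == 0 {
    		fmt.Println("no peer certificate presented")
    		return
    	}
    	cert := certs[0]
    	fmt.Println("notBefore:", cert.NotBefore)
    	fmt.Println("notAfter: ", cert.NotAfter)
    	if time.Now().After(cert.NotAfter) {
    		fmt.Println("expired - matches the x509 error in the log")
    	}
    }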
Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.234715 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.235044 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.235140 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.235245 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.235332 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:06Z","lastTransitionTime":"2025-12-04T17:29:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
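The condition={...} payload printed by setters.go is plain JSON mirroring the Kubernetes NodeCondition type. A small Go sketch that parses the exact payload from the block above (the struct covers only the fields present in the log):

    package main

    import (
    	"encoding/json"
    	"fmt"
    )

    // NodeCondition mirrors the fields that appear in the setters.go
    // condition={...} payload above (a subset of the Kubernetes type).
    type NodeCondition struct {
    	Type               string `json:"type"`
    	Status             string `json:"status"`
    	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
    	LastTransitionTime string `json:"lastTransitionTime"`
    	Reason             string `json:"reason"`
    	Message            string `json:"message"`
    }

    func main() {
    	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:06Z","lastTransitionTime":"2025-12-04T17:29:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`
    	var c NodeCondition
    	if err := json.Unmarshal([]byte(raw), &c); err != nil {
    		fmt.Println("unmarshal failed:", err)
    		return
    	}
    	fmt.Printf("%s=%s reason=%s since %s\n", c.Type, c.Status, c.Reason, c.LastTransitionTime)
    }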
Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.240590 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj"
Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.240637 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.240596 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.240596 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 17:29:06 crc kubenswrapper[4631]: E1204 17:29:06.240750 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b"
Dec 04 17:29:06 crc kubenswrapper[4631]: E1204 17:29:06.240851 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 04 17:29:06 crc kubenswrapper[4631]: E1204 17:29:06.240922 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 04 17:29:06 crc kubenswrapper[4631]: E1204 17:29:06.240968 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
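All four sync failures block on the same missing CNI configuration. A quick Go sketch for checking whether the network plugin has written its config yet, assuming the directory /etc/kubernetes/cni/net.d/ named in the message (the accepted-extension list is the usual CNI convention, not quoted in this log):

    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    )

    func main() {
    	// Directory taken from the NetworkPluginNotReady message above.
    	dir := "/etc/kubernetes/cni/net.d"
    	entries, err := os.ReadDir(dir)
    	if err != nil {
    		fmt.Println("cannot read CNI conf dir:", err)
    		return
    	}
    	found := false
    	for _, e := range entries {
    		switch filepath.Ext(e.Name()) {
    		// .conf, .conflist and .json are the conventional CNI config
    		// extensions (an assumption, not stated in the log).
    		case ".conf", ".conflist", ".json":
    			fmt.Println("CNI config present:", e.Name())
    			found = true
    		}
    	}
    	if !found {
    		fmt.Println("no CNI configuration file in", dir)
    	}
    }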
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.338590 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.339098 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.339248 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.339451 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.339681 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:06Z","lastTransitionTime":"2025-12-04T17:29:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.442958 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.443076 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.443089 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.443108 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.443121 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:06Z","lastTransitionTime":"2025-12-04T17:29:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.546200 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.546258 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.546269 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.546285 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.546297 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:06Z","lastTransitionTime":"2025-12-04T17:29:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.649208 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.649658 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.649801 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.649931 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.650062 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:06Z","lastTransitionTime":"2025-12-04T17:29:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.752302 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.752384 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.752396 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.752548 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.752562 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:06Z","lastTransitionTime":"2025-12-04T17:29:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.855494 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.855562 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.855575 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.855645 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.855659 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:06Z","lastTransitionTime":"2025-12-04T17:29:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.959323 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.959398 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.959416 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.959443 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:06 crc kubenswrapper[4631]: I1204 17:29:06.959466 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:06Z","lastTransitionTime":"2025-12-04T17:29:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.063140 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.063191 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.063204 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.063222 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.063233 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:07Z","lastTransitionTime":"2025-12-04T17:29:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.166360 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.166418 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.166428 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.166441 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.166451 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:07Z","lastTransitionTime":"2025-12-04T17:29:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.269012 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.269444 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.269602 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.269809 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.269977 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:07Z","lastTransitionTime":"2025-12-04T17:29:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.373556 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.373627 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.373647 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.373671 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.373692 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:07Z","lastTransitionTime":"2025-12-04T17:29:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.476446 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.477635 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.477904 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.478112 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.478310 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:07Z","lastTransitionTime":"2025-12-04T17:29:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.587746 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.587871 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.587891 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.587914 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.587931 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:07Z","lastTransitionTime":"2025-12-04T17:29:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.695214 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.695280 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.695298 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.695327 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.695351 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:07Z","lastTransitionTime":"2025-12-04T17:29:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.799735 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.799797 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.799815 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.799881 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.799900 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:07Z","lastTransitionTime":"2025-12-04T17:29:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.902968 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.903041 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.903057 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.903082 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:07 crc kubenswrapper[4631]: I1204 17:29:07.903100 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:07Z","lastTransitionTime":"2025-12-04T17:29:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.006565 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.006632 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.006646 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.006671 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.006686 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:08Z","lastTransitionTime":"2025-12-04T17:29:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.110116 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.110156 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.110168 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.110211 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.110228 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:08Z","lastTransitionTime":"2025-12-04T17:29:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.214443 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.214534 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.214561 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.214596 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.214621 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:08Z","lastTransitionTime":"2025-12-04T17:29:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.239140 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.239163 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.239234 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:29:08 crc kubenswrapper[4631]: E1204 17:29:08.239351 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
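Earlier in the log (17:28:18 through 17:29:03) the kube-multus container exited after "still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf ... pollimmediate error: timed out waiting for the condition". Multus does that wait with an immediate-poll loop (upstream uses the k8s.io/apimachinery wait helpers); a self-contained Go sketch of the same pattern, with the path from the log and interval/timeout values chosen only for illustration:

    package main

    import (
    	"fmt"
    	"os"
    	"time"
    )

    // waitForFile polls for path until timeout, the immediate-poll
    // pattern behind the "pollimmediate error: timed out waiting for
    // the condition" message in the multus record above.
    func waitForFile(path string, interval, timeout time.Duration) error {
    	deadline := time.Now().Add(timeout)
    	for {
    		if _, err := os.Stat(path); err == nil {
    			return nil // readiness indicator present
    		}
    		if time.Now().After(deadline) {
    			return fmt.Errorf("timed out waiting for %s", path)
    		}
    		time.Sleep(interval)
    	}
    }

    func main() {
    	// Path taken from the readiness-indicator message in the log;
    	// 1s/45s are illustrative, not multus's actual settings.
    	err := waitForFile("/host/run/multus/cni/net.d/10-ovn-kubernetes.conf",
    		1*time.Second, 45*time.Second)
    	fmt.Println(err)
    }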
pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:08 crc kubenswrapper[4631]: E1204 17:29:08.239484 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.240000 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:29:08 crc kubenswrapper[4631]: E1204 17:29:08.240065 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:29:08 crc kubenswrapper[4631]: E1204 17:29:08.240151 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.241043 4631 scope.go:117] "RemoveContainer" containerID="440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866" Dec 04 17:29:08 crc kubenswrapper[4631]: E1204 17:29:08.241478 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-vpgzg_openshift-ovn-kubernetes(0d617abc-dc04-4807-b684-3640cde38e81)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" podUID="0d617abc-dc04-4807-b684-3640cde38e81" Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.259359 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.318570 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.318631 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.318665 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.318693 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:08 crc kubenswrapper[4631]: I1204 17:29:08.318707 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:08Z","lastTransitionTime":"2025-12-04T17:29:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
[log trimmed: the same heartbeat block repeats at roughly 100 ms intervals from 17:29:08.421 through 17:29:09.248]
Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.351972 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.352040 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.352058 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.352083 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.352103 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:09Z","lastTransitionTime":"2025-12-04T17:29:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.456012 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.456091 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.456106 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.456125 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.456141 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:09Z","lastTransitionTime":"2025-12-04T17:29:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.558858 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.558903 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.558918 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.558937 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.558953 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:09Z","lastTransitionTime":"2025-12-04T17:29:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.662112 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.662180 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.662192 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.662218 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.662237 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:09Z","lastTransitionTime":"2025-12-04T17:29:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.765522 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.765596 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.765614 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.765641 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.765659 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:09Z","lastTransitionTime":"2025-12-04T17:29:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.868700 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.868771 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.868817 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.868849 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.868869 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:09Z","lastTransitionTime":"2025-12-04T17:29:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.971668 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.971749 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.971765 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.971784 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:09 crc kubenswrapper[4631]: I1204 17:29:09.971849 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:09Z","lastTransitionTime":"2025-12-04T17:29:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.075161 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.075217 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.075226 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.075240 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.075250 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:10Z","lastTransitionTime":"2025-12-04T17:29:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.177773 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.177805 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.177817 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.177833 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.177844 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:10Z","lastTransitionTime":"2025-12-04T17:29:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.239304 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:10 crc kubenswrapper[4631]: E1204 17:29:10.239489 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.239608 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:29:10 crc kubenswrapper[4631]: E1204 17:29:10.240174 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.240529 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:29:10 crc kubenswrapper[4631]: E1204 17:29:10.240842 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.240743 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:29:10 crc kubenswrapper[4631]: E1204 17:29:10.241521 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.262125 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"
,\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-04T17:29:10Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.279711 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:10Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.281122 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:10 
crc kubenswrapper[4631]: I1204 17:29:10.281233 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.281410 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.281668 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.281875 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:10Z","lastTransitionTime":"2025-12-04T17:29:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.294092 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:10Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.318535 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:10Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.329752 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:10Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.342199 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29a77294-77b9-4074-9cb9-e31985b67447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://712c54ce6d416f06964e2a50bf902d2c18b7bf713a9d67265bfbadc2fbf4ee6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96709e063c20b40adae03f18364ec6297f144e6454670c5ab760f150837e165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-88qcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:10Z is after 2025-08-24T17:21:41Z" Dec 04 
17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.351612 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:10Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.368867 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"message\\\":\\\"perations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/api]} name:Service_openshift-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.37:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {88e20c31-5b8d-4d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 17:28:52.896842 6203 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/api]} name:Service_openshift-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.37:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {88e20c31-5b8d-4d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 17:28:52.895898 6203 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1204 17:28:52.897121 6203 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vpgzg_openshift-ovn-kubernetes(0d617abc-dc04-4807-b684-3640cde38e81)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:10Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.380653 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8kcrj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8kcrj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:10Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.385696 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.385754 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.385769 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.385789 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.385802 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:10Z","lastTransitionTime":"2025-12-04T17:29:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.395001 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:10Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.420304 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:10Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.433751 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"431a4fcf-a830-43a4-94a0-b4c6d871d52f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5556f68691980174d165f902e2983cffe12a923d267b5615c07f9dc7da73efa0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581273bead771845e5261f066fc22bbde1f8a36db2db091b00a9e008fcb181e5\\\",\\\"image\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://581273bead771845e5261f066fc22bbde1f8a36db2db091b00a9e008fcb181e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:10Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.453921 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc3
5825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:10Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.466715 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf70493c-4094-4783-939d-ac61051c83ac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2232c209adbed3161e31474c06d86e5184985a392f0e7f7c115889bf80ad5266\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17ca1bf820b90d0dcab27c3f59301d671ae001940de1b634174e371a77a5208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b24dbe6d9b284dd49778fcde6a5d7b665e2fca8181a7cb11c2d14028da9fb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:10Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.479831 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:10Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.489089 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.489162 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.489186 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.489212 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.489230 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:10Z","lastTransitionTime":"2025-12-04T17:29:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.494438 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:10Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.505210 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:10Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.515665 4631 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efdd152e4738f125d721a6d044c0d96a378761e28d18f292c85706d0f3158f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:29:03Z\\\",\\\"message\\\":\\\"2025-12-04T17:28:18+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7912288c-7bc7-4aeb-8144-7db1229d544b\\\\n2025-12-04T17:28:18+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7912288c-7bc7-4aeb-8144-7db1229d544b to /host/opt/cni/bin/\\\\n2025-12-04T17:28:18Z [verbose] multus-daemon started\\\\n2025-12-04T17:28:18Z [verbose] Readiness Indicator file check\\\\n2025-12-04T17:29:03Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:29:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:10Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.593326 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.593438 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.593457 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.593489 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.593510 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:10Z","lastTransitionTime":"2025-12-04T17:29:10Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.697460 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.697502 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.697515 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.697536 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.697550 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:10Z","lastTransitionTime":"2025-12-04T17:29:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.801140 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.801188 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.801199 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.801220 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.801232 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:10Z","lastTransitionTime":"2025-12-04T17:29:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.904842 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.904914 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.904933 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.904959 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:10 crc kubenswrapper[4631]: I1204 17:29:10.904976 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:10Z","lastTransitionTime":"2025-12-04T17:29:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.008200 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.008281 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.008305 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.008341 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.008401 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:11Z","lastTransitionTime":"2025-12-04T17:29:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.111184 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.111223 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.111237 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.111255 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.111268 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:11Z","lastTransitionTime":"2025-12-04T17:29:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.214176 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.214231 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.214250 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.214279 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.214301 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:11Z","lastTransitionTime":"2025-12-04T17:29:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.317587 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.317657 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.317674 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.317700 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.317717 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:11Z","lastTransitionTime":"2025-12-04T17:29:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.420713 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.420749 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.420757 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.420770 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.420780 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:11Z","lastTransitionTime":"2025-12-04T17:29:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.524089 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.524144 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.524156 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.524175 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.524188 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:11Z","lastTransitionTime":"2025-12-04T17:29:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.627785 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.627833 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.627847 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.627869 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.627884 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:11Z","lastTransitionTime":"2025-12-04T17:29:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.731833 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.732252 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.732484 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.732720 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.732937 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:11Z","lastTransitionTime":"2025-12-04T17:29:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.836554 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.836621 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.836636 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.836658 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.836673 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:11Z","lastTransitionTime":"2025-12-04T17:29:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.940454 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.940528 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.940539 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.940557 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:11 crc kubenswrapper[4631]: I1204 17:29:11.940570 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:11Z","lastTransitionTime":"2025-12-04T17:29:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.042879 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.042999 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.043017 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.043034 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.043046 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:12Z","lastTransitionTime":"2025-12-04T17:29:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.145804 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.145843 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.145851 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.145865 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.145874 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:12Z","lastTransitionTime":"2025-12-04T17:29:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.239498 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.239523 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.239522 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.239742 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:29:12 crc kubenswrapper[4631]: E1204 17:29:12.239843 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:12 crc kubenswrapper[4631]: E1204 17:29:12.240005 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:12 crc kubenswrapper[4631]: E1204 17:29:12.240169 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
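Every entry above reduces to the same condition: the pods cannot be synced because no CNI network configuration exists yet in /etc/kubernetes/cni/net.d/. A minimal standalone Go sketch (an illustration, not kubelet code; the file name, output wording, and accepted extensions are assumptions) that reproduces the directory check named in these errors:

// cnicheck.go: diagnostic sketch. Lists the directory named in the errors
// above and reports whether any CNI config file is present, which is the
// condition the network plugin must satisfy before the node can go Ready.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // path taken from the log messages
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Fprintf(os.Stderr, "cannot read %s: %v\n", confDir, err)
		os.Exit(1)
	}
	var found []string
	for _, e := range entries {
		// .conf, .conflist and .json are the extensions CNI config loaders
		// conventionally accept (an assumption; check your plugin's docs).
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			found = append(found, e.Name())
		}
	}
	if len(found) == 0 {
		fmt.Println("no CNI configuration file found; network plugin has not started or has not written its config")
		os.Exit(1)
	}
	fmt.Println("CNI configs present:", found)
}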
pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:12 crc kubenswrapper[4631]: E1204 17:29:12.240256 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.248489 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.248583 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.248625 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.248660 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.248695 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:12Z","lastTransitionTime":"2025-12-04T17:29:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.351485 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.351552 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.351571 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.351595 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.351614 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:12Z","lastTransitionTime":"2025-12-04T17:29:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.454426 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.454473 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.454487 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.454503 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.454515 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:12Z","lastTransitionTime":"2025-12-04T17:29:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.558157 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.558234 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.558259 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.558288 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.558310 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:12Z","lastTransitionTime":"2025-12-04T17:29:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.660973 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.661032 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.661045 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.661061 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.661078 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:12Z","lastTransitionTime":"2025-12-04T17:29:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.764046 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.764118 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.764135 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.764158 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.764170 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:12Z","lastTransitionTime":"2025-12-04T17:29:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.867158 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.867217 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.867233 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.867254 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.867268 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:12Z","lastTransitionTime":"2025-12-04T17:29:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.969965 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.970031 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.970047 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.970077 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:12 crc kubenswrapper[4631]: I1204 17:29:12.970097 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:12Z","lastTransitionTime":"2025-12-04T17:29:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.072293 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.072346 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.072362 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.072403 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.072420 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:13Z","lastTransitionTime":"2025-12-04T17:29:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.175705 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.175749 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.175761 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.175776 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.175787 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:13Z","lastTransitionTime":"2025-12-04T17:29:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.279045 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.279099 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.279119 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.279136 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.279149 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:13Z","lastTransitionTime":"2025-12-04T17:29:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.382352 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.382440 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.382452 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.382469 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.382485 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:13Z","lastTransitionTime":"2025-12-04T17:29:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.485803 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.485864 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.485884 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.485911 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.485929 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:13Z","lastTransitionTime":"2025-12-04T17:29:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.590179 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.590249 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.590275 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.590307 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.590330 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:13Z","lastTransitionTime":"2025-12-04T17:29:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.693723 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.693775 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.693786 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.693807 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.693823 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:13Z","lastTransitionTime":"2025-12-04T17:29:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.797208 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.797600 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.797718 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.797836 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.797949 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:13Z","lastTransitionTime":"2025-12-04T17:29:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.900550 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.900597 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.900607 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.900623 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:13 crc kubenswrapper[4631]: I1204 17:29:13.900635 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:13Z","lastTransitionTime":"2025-12-04T17:29:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.004236 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.004278 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.004293 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.004315 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.004330 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:14Z","lastTransitionTime":"2025-12-04T17:29:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.107330 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.107437 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.107456 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.107482 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.107500 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:14Z","lastTransitionTime":"2025-12-04T17:29:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.211676 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.211838 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.211888 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.212015 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.212087 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:14Z","lastTransitionTime":"2025-12-04T17:29:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
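The condition payload logged by setters.go in the cycles above is plain JSON and can be decoded directly. A short Go sketch (the struct is an illustration mirroring the logged fields, not the kubelet's own type):

// readycond.go: parses the condition payload from the setters.go entries.
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

type nodeCondition struct {
	Type               string    `json:"type"`
	Status             string    `json:"status"`
	LastHeartbeatTime  time.Time `json:"lastHeartbeatTime"`
	LastTransitionTime time.Time `json:"lastTransitionTime"`
	Reason             string    `json:"reason"`
	Message            string    `json:"message"`
}

func main() {
	// Verbatim condition from the 17:29:11.214301 setters.go entry above.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:11Z","lastTransitionTime":"2025-12-04T17:29:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`
	var c nodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		panic(err)
	}
	fmt.Printf("node Ready=%s since %s (%s)\n", c.Status, c.LastTransitionTime, c.Reason)
}

Run against that entry it prints: node Ready=False since 2025-12-04 17:29:11 +0000 UTC (KubeletNotReady).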
Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.238742 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.238778 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.238838 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj"
Dec 04 17:29:14 crc kubenswrapper[4631]: E1204 17:29:14.239642 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 04 17:29:14 crc kubenswrapper[4631]: E1204 17:29:14.239021 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 04 17:29:14 crc kubenswrapper[4631]: E1204 17:29:14.239749 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b"
Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.238742 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 17:29:14 crc kubenswrapper[4631]: E1204 17:29:14.239943 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[... the five-entry node-status cycle repeats at short intervals from 17:29:14.315 through 17:29:14.893, unchanged except for timestamps ...]
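The status-patch failure recorded below exposes the underlying blocker: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 is serving a certificate that expired on 2025-08-24T17:21:41Z, so every node-status PATCH is rejected during the TLS handshake. A Go sketch of the equivalent check (an illustration; it dials without verification solely to read the peer certificate and compare its validity window to the clock):

// certcheck.go: reads the serving certificate of the webhook endpoint named
// in the error below and reports whether its validity window has lapsed.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Webhook endpoint taken from the error below; adjust for other targets.
	addr := "127.0.0.1:9743"
	conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()
	certs := conn.ConnectionState().PeerCertificates
	if len(certs) == 0 {
		fmt.Println("no peer certificate presented")
		return
	}
	cert := certs[0]
	now := time.Now().UTC()
	fmt.Printf("serving certificate valid %s to %s\n",
		cert.NotBefore.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	if now.After(cert.NotAfter) {
		// Mirrors the kubelet error: "current time ... is after <NotAfter>".
		fmt.Printf("expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	}
}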
Dec 04 17:29:14 crc kubenswrapper[4631]: E1204 17:29:14.907633 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:14Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.913799 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.913881 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.913901 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.913932 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.913950 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:14Z","lastTransitionTime":"2025-12-04T17:29:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:14 crc kubenswrapper[4631]: E1204 17:29:14.931972 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:14Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.936741 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.936849 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.936870 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.936940 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.936993 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:14Z","lastTransitionTime":"2025-12-04T17:29:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:14 crc kubenswrapper[4631]: E1204 17:29:14.950785 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:14Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.956754 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.956963 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.957161 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.957357 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.957508 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:14Z","lastTransitionTime":"2025-12-04T17:29:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:14 crc kubenswrapper[4631]: E1204 17:29:14.973056 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:14Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.979469 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.979531 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.979548 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.979574 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:14 crc kubenswrapper[4631]: I1204 17:29:14.979596 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:14Z","lastTransitionTime":"2025-12-04T17:29:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:15 crc kubenswrapper[4631]: E1204 17:29:15.002939 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:14Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:15 crc kubenswrapper[4631]: E1204 17:29:15.003111 4631 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.006882 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.006958 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.006997 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.007030 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.007056 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:15Z","lastTransitionTime":"2025-12-04T17:29:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.110152 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.110197 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.110210 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.110230 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.110244 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:15Z","lastTransitionTime":"2025-12-04T17:29:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.212877 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.212925 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.212936 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.212955 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.212967 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:15Z","lastTransitionTime":"2025-12-04T17:29:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.316711 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.316763 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.316778 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.316802 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.316818 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:15Z","lastTransitionTime":"2025-12-04T17:29:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.420845 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.420891 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.420904 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.420925 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.420936 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:15Z","lastTransitionTime":"2025-12-04T17:29:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.523568 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.523627 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.523642 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.523666 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.523681 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:15Z","lastTransitionTime":"2025-12-04T17:29:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.626709 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.626788 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.626812 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.626846 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.626870 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:15Z","lastTransitionTime":"2025-12-04T17:29:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.730016 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.730079 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.730096 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.730120 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.730135 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:15Z","lastTransitionTime":"2025-12-04T17:29:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.833180 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.833246 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.833262 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.833284 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.833299 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:15Z","lastTransitionTime":"2025-12-04T17:29:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.952694 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.952728 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.952738 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.952756 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:15 crc kubenswrapper[4631]: I1204 17:29:15.952810 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:15Z","lastTransitionTime":"2025-12-04T17:29:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.055965 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.056028 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.056038 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.056055 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.056067 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:16Z","lastTransitionTime":"2025-12-04T17:29:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.158356 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.158427 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.158438 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.158453 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.158462 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:16Z","lastTransitionTime":"2025-12-04T17:29:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.239094 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.239133 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.239203 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:29:16 crc kubenswrapper[4631]: E1204 17:29:16.239253 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.239489 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:16 crc kubenswrapper[4631]: E1204 17:29:16.239616 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:29:16 crc kubenswrapper[4631]: E1204 17:29:16.239699 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:16 crc kubenswrapper[4631]: E1204 17:29:16.239773 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.260995 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.261035 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.261047 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.261062 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.261075 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:16Z","lastTransitionTime":"2025-12-04T17:29:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.365172 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.365234 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.365247 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.365269 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.365285 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:16Z","lastTransitionTime":"2025-12-04T17:29:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.468213 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.468266 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.468310 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.468332 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.468344 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:16Z","lastTransitionTime":"2025-12-04T17:29:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.571062 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.571176 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.571243 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.571267 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.571281 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:16Z","lastTransitionTime":"2025-12-04T17:29:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.674338 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.674702 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.674793 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.674894 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.674977 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:16Z","lastTransitionTime":"2025-12-04T17:29:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.777414 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.778306 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.778442 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.778571 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.778669 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:16Z","lastTransitionTime":"2025-12-04T17:29:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.882367 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.882468 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.882492 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.882523 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.882542 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:16Z","lastTransitionTime":"2025-12-04T17:29:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.986061 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.986127 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.986142 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.986168 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:16 crc kubenswrapper[4631]: I1204 17:29:16.986184 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:16Z","lastTransitionTime":"2025-12-04T17:29:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.088450 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.088510 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.088520 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.088539 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.088550 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:17Z","lastTransitionTime":"2025-12-04T17:29:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.191556 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.191613 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.191628 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.191653 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.191671 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:17Z","lastTransitionTime":"2025-12-04T17:29:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.294997 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.295052 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.295063 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.295086 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.295101 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:17Z","lastTransitionTime":"2025-12-04T17:29:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.398143 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.398554 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.398618 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.398685 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.398742 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:17Z","lastTransitionTime":"2025-12-04T17:29:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.506856 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.506900 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.506909 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.506923 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.506935 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:17Z","lastTransitionTime":"2025-12-04T17:29:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.610267 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.610309 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.610318 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.610331 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.610344 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:17Z","lastTransitionTime":"2025-12-04T17:29:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.713891 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.713959 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.714013 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.714052 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.714075 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:17Z","lastTransitionTime":"2025-12-04T17:29:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.816442 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.816479 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.816488 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.816504 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.816515 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:17Z","lastTransitionTime":"2025-12-04T17:29:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.919655 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.919689 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.919700 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.919717 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:17 crc kubenswrapper[4631]: I1204 17:29:17.919727 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:17Z","lastTransitionTime":"2025-12-04T17:29:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.022034 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.022096 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.022105 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.022117 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.022125 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:18Z","lastTransitionTime":"2025-12-04T17:29:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.125509 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.125556 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.125569 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.125588 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.125601 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:18Z","lastTransitionTime":"2025-12-04T17:29:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.230758 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.230806 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.230820 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.230841 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.230857 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:18Z","lastTransitionTime":"2025-12-04T17:29:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.238477 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:29:18 crc kubenswrapper[4631]: E1204 17:29:18.238910 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.238547 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:29:18 crc kubenswrapper[4631]: E1204 17:29:18.239160 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.238537 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:29:18 crc kubenswrapper[4631]: E1204 17:29:18.239360 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.238547 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:18 crc kubenswrapper[4631]: E1204 17:29:18.239575 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.333881 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.333919 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.333927 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.333940 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.333949 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:18Z","lastTransitionTime":"2025-12-04T17:29:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.436954 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.436998 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.437010 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.437032 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.437042 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:18Z","lastTransitionTime":"2025-12-04T17:29:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.540300 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.540344 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.540354 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.540387 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.540401 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:18Z","lastTransitionTime":"2025-12-04T17:29:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.643832 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.644170 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.644254 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.644362 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.644511 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:18Z","lastTransitionTime":"2025-12-04T17:29:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.746891 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.746938 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.746949 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.746964 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.746977 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:18Z","lastTransitionTime":"2025-12-04T17:29:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.850860 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.850929 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.850939 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.850958 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.850967 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:18Z","lastTransitionTime":"2025-12-04T17:29:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.954785 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.954837 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.954846 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.954865 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:18 crc kubenswrapper[4631]: I1204 17:29:18.954892 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:18Z","lastTransitionTime":"2025-12-04T17:29:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.058349 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.058421 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.058436 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.058461 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.058476 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:19Z","lastTransitionTime":"2025-12-04T17:29:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.162031 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.162094 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.162114 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.162146 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.162169 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:19Z","lastTransitionTime":"2025-12-04T17:29:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.265540 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.265580 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.265589 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.265606 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.265615 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:19Z","lastTransitionTime":"2025-12-04T17:29:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.368507 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.368557 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.368570 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.368590 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.368602 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:19Z","lastTransitionTime":"2025-12-04T17:29:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.471456 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.471852 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.472047 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.472943 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.473083 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:19Z","lastTransitionTime":"2025-12-04T17:29:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.576291 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.576359 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.576418 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.576448 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.576466 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:19Z","lastTransitionTime":"2025-12-04T17:29:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.679680 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.679761 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.679781 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.679806 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.679824 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:19Z","lastTransitionTime":"2025-12-04T17:29:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.783690 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.783743 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.783756 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.783779 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.783793 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:19Z","lastTransitionTime":"2025-12-04T17:29:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.886651 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.886724 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.886747 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.886777 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.886799 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:19Z","lastTransitionTime":"2025-12-04T17:29:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.990336 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.990432 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.990451 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.990478 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:19 crc kubenswrapper[4631]: I1204 17:29:19.990534 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:19Z","lastTransitionTime":"2025-12-04T17:29:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.093867 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.093907 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.093916 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.093932 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.093944 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:20Z","lastTransitionTime":"2025-12-04T17:29:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.097310 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:29:20 crc kubenswrapper[4631]: E1204 17:29:20.097471 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:24.097449554 +0000 UTC m=+154.129691552 (durationBeforeRetry 1m4s). 
Dec 04 17:29:20 crc kubenswrapper[4631]: E1204 17:29:20.097471 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:24.097449554 +0000 UTC m=+154.129691552 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
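The TearDown failure above is a registration problem rather than a mount problem: the kubelet only talks to CSI drivers that have announced themselves over a registration socket, and kubevirt.io.hostpath-provisioner is not in its list. A rough standalone sketch of that check follows (the driver name comes from the error; /var/lib/kubelet/plugins_registry is the usual default registration directory and is an assumption here, since a non-default kubelet root-dir would move it):

package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	const driver = "kubevirt.io.hostpath-provisioner" // driver named in the TearDown error
	dir := "/var/lib/kubelet/plugins_registry"        // assumed default registry path
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Printf("cannot read %s: %v\n", dir, err)
		os.Exit(1)
	}
	registered := false
	for _, e := range entries {
		fmt.Println("registration socket:", e.Name())
		if strings.Contains(e.Name(), driver) {
			registered = true
		}
	}
	if !registered {
		fmt.Println("no socket for", driver, "- consistent with the TearDown error above")
	}
}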
Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.198436 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.198489 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.198504 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.198528 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.198535 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.198546 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:20Z","lastTransitionTime":"2025-12-04T17:29:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.198602 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.198645 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.198685 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 17:29:20 crc kubenswrapper[4631]: E1204 17:29:20.198899 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 04 17:29:20 crc kubenswrapper[4631]: E1204 17:29:20.198929 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 04 17:29:20 crc kubenswrapper[4631]: E1204 17:29:20.198947 4631 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 04 17:29:20 crc kubenswrapper[4631]: E1204 17:29:20.199019 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 17:30:24.198993393 +0000 UTC m=+154.231235411 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 04 17:29:20 crc kubenswrapper[4631]: E1204 17:29:20.199110 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 04 17:29:20 crc kubenswrapper[4631]: E1204 17:29:20.199129 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 04 17:29:20 crc kubenswrapper[4631]: E1204 17:29:20.199146 4631 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 04 17:29:20 crc kubenswrapper[4631]: E1204 17:29:20.199187 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 17:30:24.199173798 +0000 UTC m=+154.231415806 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 04 17:29:20 crc kubenswrapper[4631]: E1204 17:29:20.199241 4631 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 04 17:29:20 crc kubenswrapper[4631]: E1204 17:29:20.199279 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 17:30:24.199266661 +0000 UTC m=+154.231508679 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 04 17:29:20 crc kubenswrapper[4631]: E1204 17:29:20.199356 4631 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
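The "durationBeforeRetry 1m4s" in these nestedpendingoperations entries reflects the volume manager's exponential backoff: each failed mount/unmount attempt roughly doubles the wait before the next try, and 1m4s is the 64-second step of such a doubling series. An illustrative sketch follows (the 500ms initial delay and 2m cap are assumptions for illustration, not the kubelet's exact constants; with these values, attempt 8 prints the 1m4s seen in the log):

package main

import (
	"fmt"
	"time"
)

func main() {
	delay := 500 * time.Millisecond // assumed initial delay
	maxDelay := 2 * time.Minute     // assumed cap
	for attempt := 1; attempt <= 9; attempt++ {
		fmt.Printf("attempt %d: wait %v before retrying\n", attempt, delay)
		delay *= 2 // double the delay after every failure
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}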
Dec 04 17:29:20 crc kubenswrapper[4631]: E1204 17:29:20.199427 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 17:30:24.199410695 +0000 UTC m=+154.231652713 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.239069 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.239107 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.239127 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 17:29:20 crc kubenswrapper[4631]: E1204 17:29:20.239286 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.239328 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj"
Dec 04 17:29:20 crc kubenswrapper[4631]: E1204 17:29:20.239540 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 04 17:29:20 crc kubenswrapper[4631]: E1204 17:29:20.239752 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b"
Dec 04 17:29:20 crc kubenswrapper[4631]: E1204 17:29:20.239824 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.259489 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.281714 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.299941 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efdd152e4738f125d721a6d044c0d96a378761e28d18f292c85706d0f3158f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:29:03Z\\\",\\\"message\\\":\\\"2025-12-04T17:28:18+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7912288c-7bc7-4aeb-8144-7db1229d544b\\\\n2025-12-04T17:28:18+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7912288c-7bc7-4aeb-8144-7db1229d544b to /host/opt/cni/bin/\\\\n2025-12-04T17:28:18Z [verbose] 
multus-daemon started\\n2025-12-04T17:28:18Z [verbose] Readiness Indicator file check\\n2025-12-04T17:29:03Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\n\",\"reason\":\"Error\",\"startedAt\":\"2025-12-04T17:28:17Z\"}},\"name\":\"kube-multus\",\"ready\":true,\"restartCount\":1,\"started\":true,\"state\":{\"running\":{\"startedAt\":\"2025-12-04T17:29:04Z\"}},\"volumeMounts\":[{\"mountPath\":\"/entrypoint\",\"name\":\"cni-binary-copy\"},{\"mountPath\":\"/host/etc/os-release\",\"name\":\"os-release\"},{\"mountPath\":\"/host/etc/cni/net.d\",\"name\":\"system-cni-dir\"},{\"mountPath\":\"/host/run/multus/cni/net.d\",\"name\":\"multus-cni-dir\"},{\"mountPath\":\"/host/opt/cni/bin\",\"name\":\"cnibin\"},{\"mountPath\":\"/host/run/multus\",\"name\":\"multus-socket-dir-parent\"},{\"mountPath\":\"/run/k8s.cni.cncf.io\",\"name\":\"host-run-k8s-cni-cncf-io\"},{\"mountPath\":\"/run/netns\",\"name\":\"host-run-netns\"},{\"mountPath\":\"/var/lib/cni/bin\",\"name\":\"host-var-lib-cni-bin\"},{\"mountPath\":\"/var/lib/cni/multus\",\"name\":\"host-var-lib-cni-multus\"},{\"mountPath\":\"/var/lib/kubelet\",\"name\":\"host-var-lib-kubelet\"},{\"mountPath\":\"/hostroot\",\"name\":\"hostroot\"},{\"mountPath\":\"/etc/cni/multus/net.d\",\"name\":\"multus-conf-dir\"},{\"mountPath\":\"/etc/cni/net.d/multus.d\",\"name\":\"multus-daemon-config\",\"readOnly\":true,\"recursiveReadOnly\":\"Disabled\"},{\"mountPath\":\"/etc/cni/multus/certs\",\"name\":\"host-run-multus-certs\"},{\"mountPath\":\"/etc/kubernetes\",\"name\":\"etc-kubernetes\"},{\"mountPath\":\"/var/run/secrets/kubernetes.io/serviceaccount\",\"name\":\"kube-api-access-d5rnb\",\"readOnly\":true,\"recursiveReadOnly\":\"Disabled\"}]}],\"hostIP\":\"192.168.126.11\",\"hostIPs\":[{\"ip\":\"192.168.126.11\"}],\"phase\":\"Running\",\"podIP\":\"192.168.126.11\",\"podIPs\":[{\"ip\":\"192.168.126.11\"}],\"startTime\":\"2025-12-04T17:28:16Z\"}}" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:20Z is after 2025-08-24T17:21:41Z"
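Every status patch in this stretch dies on the same x509 error: the webhook serving certificate at 127.0.0.1:9743 expired on 2025-08-24, long before the node's clock time of 2025-12-04. A small Go sketch for confirming that from the node follows (the endpoint is taken from the log line; chain verification is skipped only so the expired certificate can still be retrieved and inspected):

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	addr := "127.0.0.1:9743" // webhook endpoint from the log
	// Skip chain verification only to retrieve the presented (expired) certificate.
	conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()
	certs := conn.ConnectionState().PeerCertificates
	if len(certs) == 0 {
		fmt.Println("no peer certificate presented")
		return
	}
	cert, now := certs[0], time.Now()
	fmt.Printf("NotBefore=%s NotAfter=%s now=%s\n", cert.NotBefore, cert.NotAfter, now)
	if now.After(cert.NotAfter) {
		fmt.Println("certificate has expired - matches the x509 error in the log")
	}
}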
Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.302657 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.302741 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.302756 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.302792 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.302807 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:20Z","lastTransitionTime":"2025-12-04T17:29:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.316482 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"431a4fcf-a830-43a4-94a0-b4c6d871d52f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5556f68691980174d165f902e2983cffe12a923d267b5615c07f9dc7da73efa0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581273bead771845e5261f066fc22bbde1f8a36db2db091b00a9e008fcb181e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://581273bead771845e5261f066fc22bbde1f8a36db2db091b00a9e008fcb181e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.335021 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastSt
ate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.347189 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf70493c-4094-4783-939d-ac61051c83ac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2232c209adbed3161e31474c06d86e5184985a392f0e7f7c115889bf80ad5266\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17ca1bf820b90d0dcab27c3f59301d671ae001940de1b634174e371a77a5208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b24dbe6d9b284dd49778fcde6a5d7b665e2fca8181a7cb11c2d14028da9fb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.360906 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.375187 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.387482 4631 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:20Z is after 2025-08-24T17:21:41Z" Dec 04 
17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.399398 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29a77294-77b9-4074-9cb9-e31985b67447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://712c54ce6d416f06964e2a50bf902d2c18b7bf713a9d67265bfbadc2fbf4ee6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96709e063c20b40adae03f18364ec6297f144e6454670c5ab760f150837e165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-88qcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.405682 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.405729 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.405742 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.405763 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.405782 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:20Z","lastTransitionTime":"2025-12-04T17:29:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.415742 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9
f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\
\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.429469 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-id
entity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.442203 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.457156 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.467930 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.477945 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.499736 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"message\\\":\\\"perations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/api]} name:Service_openshift-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.37:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {88e20c31-5b8d-4d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 17:28:52.896842 6203 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/api]} name:Service_openshift-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.37:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {88e20c31-5b8d-4d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 17:28:52.895898 6203 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1204 17:28:52.897121 6203 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vpgzg_openshift-ovn-kubernetes(0d617abc-dc04-4807-b684-3640cde38e81)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.508775 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.508819 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.508830 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.508867 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.508880 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:20Z","lastTransitionTime":"2025-12-04T17:29:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.511644 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8kcrj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8kcrj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:20Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.612136 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.612220 4631 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.612241 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.612269 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.612292 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:20Z","lastTransitionTime":"2025-12-04T17:29:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.715182 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.715223 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.715233 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.715250 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.715261 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:20Z","lastTransitionTime":"2025-12-04T17:29:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.818249 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.818594 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.818679 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.818777 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.818849 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:20Z","lastTransitionTime":"2025-12-04T17:29:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.921958 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.922034 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.922054 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.922084 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:20 crc kubenswrapper[4631]: I1204 17:29:20.922102 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:20Z","lastTransitionTime":"2025-12-04T17:29:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.025825 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.025935 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.025952 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.025975 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.025994 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:21Z","lastTransitionTime":"2025-12-04T17:29:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.130085 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.130699 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.130724 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.130754 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.130772 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:21Z","lastTransitionTime":"2025-12-04T17:29:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.234032 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.234141 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.234155 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.234174 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.234185 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:21Z","lastTransitionTime":"2025-12-04T17:29:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.240361 4631 scope.go:117] "RemoveContainer" containerID="440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.337555 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.337626 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.337650 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.337694 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.337725 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:21Z","lastTransitionTime":"2025-12-04T17:29:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.442639 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.442723 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.442751 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.442785 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.442812 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:21Z","lastTransitionTime":"2025-12-04T17:29:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.546099 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.546162 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.546184 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.546214 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.546237 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:21Z","lastTransitionTime":"2025-12-04T17:29:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.648604 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.648665 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.648678 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.648718 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.648732 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:21Z","lastTransitionTime":"2025-12-04T17:29:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.752173 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.752368 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.752445 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.752478 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.752501 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:21Z","lastTransitionTime":"2025-12-04T17:29:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.855438 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.855505 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.855518 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.855557 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.855568 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:21Z","lastTransitionTime":"2025-12-04T17:29:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.959144 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.959205 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.959221 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.959247 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:21 crc kubenswrapper[4631]: I1204 17:29:21.959264 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:21Z","lastTransitionTime":"2025-12-04T17:29:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.062262 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.062307 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.062322 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.062346 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.062360 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:22Z","lastTransitionTime":"2025-12-04T17:29:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.165965 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.166012 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.166025 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.166049 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.166065 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:22Z","lastTransitionTime":"2025-12-04T17:29:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.231856 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vpgzg_0d617abc-dc04-4807-b684-3640cde38e81/ovnkube-controller/2.log" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.235741 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerStarted","Data":"04cf802bdf1fd0d81a685ca8171bfcea89086fdd4979fcc18df7c134d482e052"} Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.236587 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.238848 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.238895 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:22 crc kubenswrapper[4631]: E1204 17:29:22.238940 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:22 crc kubenswrapper[4631]: E1204 17:29:22.238979 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.238988 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:29:22 crc kubenswrapper[4631]: E1204 17:29:22.239423 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.239738 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:29:22 crc kubenswrapper[4631]: E1204 17:29:22.239830 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.251731 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.269195 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.269241 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.269258 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.269281 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.269299 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:22Z","lastTransitionTime":"2025-12-04T17:29:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.271978 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efdd152e4738f125d721a6d044c0d96a378761e28d18f292c85706d0f3158f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:29:03Z\\\",\\\"message\\\":\\\"2025-12-04T17:28:18+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7912288c-7bc7-4aeb-8144-7db1229d544b\\\\n2025-12-04T17:28:18+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7912288c-7bc7-4aeb-8144-7db1229d544b to 
/host/opt/cni/bin/\\\\n2025-12-04T17:28:18Z [verbose] multus-daemon started\\\\n2025-12-04T17:28:18Z [verbose] Readiness Indicator file check\\\\n2025-12-04T17:29:03Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:29:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.283798 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"431a4fcf-a830-43a4-94a0-b4c6d871d52f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5556f68691980174d165f902e2983cffe12a923d267b5615c07f9dc7da73efa0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581273bead771845e5261f066fc22bbde1f8a36db2db091b00a9e008fcb181e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://581273bead771845e5261f066fc22bbde1f8a36db2db091b00a9e008fcb181e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.297728 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.309728 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf70493c-4094-4783-939d-ac61051c83ac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2232c209adbed3161e31474c06d86e5184985a392f0e7f7c115889bf80ad5266\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17ca1bf820b90d0dcab27c3f59301d671ae001940de1b634174e371a77a5208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b24dbe6d9b284dd49778fcde6a5d7b665e2fca8181a7cb11c2d14028da9fb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.327581 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.344837 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.355170 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.367463 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29a77294-77b9-4074-9cb9-e31985b67447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://712c54ce6d416f06964e2a50bf902d2c18b7bf713a9d67265bfbadc2fbf4ee6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96709e063c20b40adae03f18364ec6297f144e6454670c5ab760f150837e165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-88qcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:22Z is after 2025-08-24T17:21:41Z" Dec 04 
17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.372163 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.372231 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.372254 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.372329 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.372418 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:22Z","lastTransitionTime":"2025-12-04T17:29:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.382238 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\"
:\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.398742 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.411914 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.431332 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.445998 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.465566 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04cf802bdf1fd0d81a685ca8171bfcea89086fdd4979fcc18df7c134d482e052\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"message\\\":\\\"perations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/api]} name:Service_openshift-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.37:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {88e20c31-5b8d-4d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 17:28:52.896842 6203 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/api]} name:Service_openshift-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.37:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {88e20c31-5b8d-4d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 17:28:52.895898 6203 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1204 17:28:52.897121 6203 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:29:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.475005 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.475039 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.475050 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.475070 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.475083 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:22Z","lastTransitionTime":"2025-12-04T17:29:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.477405 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8kcrj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8kcrj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.490326 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.502918 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:22Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.579135 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.579613 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.579895 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.580087 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.580240 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:22Z","lastTransitionTime":"2025-12-04T17:29:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.682773 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.682856 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.682876 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.682907 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.682928 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:22Z","lastTransitionTime":"2025-12-04T17:29:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.786407 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.786469 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.786488 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.786514 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.786536 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:22Z","lastTransitionTime":"2025-12-04T17:29:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.890389 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.890433 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.890448 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.890464 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.890477 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:22Z","lastTransitionTime":"2025-12-04T17:29:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.993724 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.993791 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.993811 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.993841 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:22 crc kubenswrapper[4631]: I1204 17:29:22.993862 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:22Z","lastTransitionTime":"2025-12-04T17:29:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.182635 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.182683 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.182703 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.182721 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.182736 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:23Z","lastTransitionTime":"2025-12-04T17:29:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.247977 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vpgzg_0d617abc-dc04-4807-b684-3640cde38e81/ovnkube-controller/3.log" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.248868 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vpgzg_0d617abc-dc04-4807-b684-3640cde38e81/ovnkube-controller/2.log" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.252891 4631 generic.go:334] "Generic (PLEG): container finished" podID="0d617abc-dc04-4807-b684-3640cde38e81" containerID="04cf802bdf1fd0d81a685ca8171bfcea89086fdd4979fcc18df7c134d482e052" exitCode=1 Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.252937 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerDied","Data":"04cf802bdf1fd0d81a685ca8171bfcea89086fdd4979fcc18df7c134d482e052"} Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.253041 4631 scope.go:117] "RemoveContainer" containerID="440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.253753 4631 scope.go:117] "RemoveContainer" containerID="04cf802bdf1fd0d81a685ca8171bfcea89086fdd4979fcc18df7c134d482e052" Dec 04 17:29:23 crc kubenswrapper[4631]: E1204 17:29:23.253960 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-vpgzg_openshift-ovn-kubernetes(0d617abc-dc04-4807-b684-3640cde38e81)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" podUID="0d617abc-dc04-4807-b684-3640cde38e81" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.275586 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.286659 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.287090 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.287107 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.287130 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.287146 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:23Z","lastTransitionTime":"2025-12-04T17:29:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.292633 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.309058 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf70493c-4094-4783-939d-ac61051c83ac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2232c209adbed3161e31474c06d86e5184985a392f0e7f7c115889bf80ad5266\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17ca1bf820b90d0dcab27c3f59301d671ae001940de1b634174e371a77a5208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b24dbe6d9b284dd49778fcde6a5d7b665e2fca8181a7cb11c2d14028da9fb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.328517 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.350105 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.368974 4631 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:23Z is after 2025-08-24T17:21:41Z" Dec 04 
17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.388327 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efdd152e4738f125d721a6d044c0d96a378761e28d18f292c85706d0f3158f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:29:03Z\\\",\\\"message\\\":\\\"2025-12-04T17:28:18+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7912288c-7bc7-4aeb-8144-7db1229d544b\\\\n2025-12-04T17:28:18+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7912288c-7bc7-4aeb-8144-7db1229d544b to /host/opt/cni/bin/\\\\n2025-12-04T17:28:18Z [verbose] multus-daemon started\\\\n2025-12-04T17:28:18Z [verbose] Readiness Indicator file check\\\\n2025-12-04T17:29:03Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:29:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.390735 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.390770 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.390782 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.390804 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.390819 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:23Z","lastTransitionTime":"2025-12-04T17:29:23Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.406077 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"431a4fcf-a830-43a4-94a0-b4c6d871d52f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5556f68691980174d165f902e2983cffe12a923d267b5615c07f9dc7da73efa0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581273bead771845e5261f066fc22bbde1f8a36db2db091b00a9e008fcb181e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://581273bead771845e5261f066fc22bbde1f8a36db2db091b00a9e008fcb181e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-04T17:29:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.419989 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.434727 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.447540 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.462073 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.472941 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.484419 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29a77294-77b9-4074-9cb9-e31985b67447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://712c54ce6d416f06964e2a50bf902d2c18b7bf713a9d67265bfbadc2fbf4ee6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96709e063c20b40adae03f18364ec6297f144e6454670c5ab760f150837e165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-88qcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:23Z is after 2025-08-24T17:21:41Z" Dec 04 
17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.493219 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.493252 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.493264 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.493292 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.493302 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:23Z","lastTransitionTime":"2025-12-04T17:29:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.496489 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\"
:\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.505349 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8kcrj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8kcrj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.515992 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.531697 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04cf802bdf1fd0d81a685ca8171bfcea89086fdd4979fcc18df7c134d482e052\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://440aad2f92b9210c3a0153d72b3776a8a0010d3ca30b844c6740219dd8f5a866\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"message\\\":\\\"perations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/api]} name:Service_openshift-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.37:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {88e20c31-5b8d-4d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 17:28:52.896842 6203 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/api]} name:Service_openshift-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.37:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {88e20c31-5b8d-4d44-bbd8-dba87b7dbaf0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1204 17:28:52.895898 6203 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1204 17:28:52.897121 6203 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04cf802bdf1fd0d81a685ca8171bfcea89086fdd4979fcc18df7c134d482e052\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:29:22Z\\\",\\\"message\\\":\\\"led objects of type *v1.Pod\\\\nI1204 17:29:22.424260 6484 obj_retry.go:409] Going to retry *v1.Pod resource setup for 13 objects: [openshift-network-operator/iptables-alerter-4ln5h openshift-image-registry/node-ca-6ns2w openshift-multus/multus-additional-cni-plugins-bfhhc openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-ovn-kubernetes/ovnkube-node-vpgzg openshift-kube-controller-manager/kube-controller-manager-crc openshift-kube-scheduler/openshift-kube-scheduler-crc openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-network-operator/network-operator-58b4c7f79c-55gtf openshift-multus/network-metrics-daemon-8kcrj openshift-dns/node-resolver-dzxft openshift-kube-apiserver/kube-apiserver-crc openshift-machine-config-operator/kube-rbac-proxy-crio-crc]\\\\nI1204 
17:29:22.424291 6484 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nF1204 17:29:22.424311 6484 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:29:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\
\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:23Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.596063 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.596152 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.596176 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.596209 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.596232 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:23Z","lastTransitionTime":"2025-12-04T17:29:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.699744 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.699788 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.699797 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.699812 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.699822 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:23Z","lastTransitionTime":"2025-12-04T17:29:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.803600 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.804127 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.804330 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.804552 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.804734 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:23Z","lastTransitionTime":"2025-12-04T17:29:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.909340 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.909794 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.909995 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.910360 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:23 crc kubenswrapper[4631]: I1204 17:29:23.910631 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:23Z","lastTransitionTime":"2025-12-04T17:29:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.014744 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.014826 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.014849 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.014879 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.014900 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:24Z","lastTransitionTime":"2025-12-04T17:29:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.119089 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.119164 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.119185 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.119215 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.119240 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:24Z","lastTransitionTime":"2025-12-04T17:29:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.222762 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.222838 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.222856 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.222889 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.222911 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:24Z","lastTransitionTime":"2025-12-04T17:29:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.239713 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.239819 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:24 crc kubenswrapper[4631]: E1204 17:29:24.239883 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.239829 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:29:24 crc kubenswrapper[4631]: E1204 17:29:24.240015 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.240067 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:29:24 crc kubenswrapper[4631]: E1204 17:29:24.240173 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:24 crc kubenswrapper[4631]: E1204 17:29:24.240281 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.260207 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vpgzg_0d617abc-dc04-4807-b684-3640cde38e81/ovnkube-controller/3.log" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.266172 4631 scope.go:117] "RemoveContainer" containerID="04cf802bdf1fd0d81a685ca8171bfcea89086fdd4979fcc18df7c134d482e052" Dec 04 17:29:24 crc kubenswrapper[4631]: E1204 17:29:24.266864 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-vpgzg_openshift-ovn-kubernetes(0d617abc-dc04-4807-b684-3640cde38e81)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" podUID="0d617abc-dc04-4807-b684-3640cde38e81" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.286292 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\
\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:24Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.308062 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:24Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.326753 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.326819 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.326839 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.326873 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.326896 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:24Z","lastTransitionTime":"2025-12-04T17:29:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.327743 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:24Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.409889 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"
Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:
20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:24Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:24 crc 
kubenswrapper[4631]: I1204 17:29:24.426611 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:24Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.430069 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.430104 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.430116 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.430136 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.430150 4631 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:24Z","lastTransitionTime":"2025-12-04T17:29:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.443117 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29a77294-77b9-4074-9cb9-e31985b67447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://712c54ce6d416f06964e2a50bf902d2c18b7bf713a9d67265bfbadc2fbf4ee6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96709e063c20b40adae03f18364ec6297f144e6454670c5ab760f150837e165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-88qcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:24Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.455666 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:24Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.478529 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04cf802bdf1fd0d81a685ca8171bfcea89086fdd4979fcc18df7c134d482e052\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04cf802bdf1fd0d81a685ca8171bfcea89086fdd4979fcc18df7c134d482e052\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:29:22Z\\\",\\\"message\\\":\\\"led objects of type *v1.Pod\\\\nI1204 17:29:22.424260 6484 obj_retry.go:409] Going to retry *v1.Pod resource setup for 13 objects: [openshift-network-operator/iptables-alerter-4ln5h openshift-image-registry/node-ca-6ns2w openshift-multus/multus-additional-cni-plugins-bfhhc openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-ovn-kubernetes/ovnkube-node-vpgzg openshift-kube-controller-manager/kube-controller-manager-crc openshift-kube-scheduler/openshift-kube-scheduler-crc openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-network-operator/network-operator-58b4c7f79c-55gtf openshift-multus/network-metrics-daemon-8kcrj openshift-dns/node-resolver-dzxft openshift-kube-apiserver/kube-apiserver-crc openshift-machine-config-operator/kube-rbac-proxy-crio-crc]\\\\nI1204 17:29:22.424291 6484 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nF1204 17:29:22.424311 6484 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:29:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vpgzg_openshift-ovn-kubernetes(0d617abc-dc04-4807-b684-3640cde38e81)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:24Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.492905 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8kcrj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8kcrj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:24Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.505787 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:24Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.518187 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:24Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.532944 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.533006 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.533018 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.533035 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.533050 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:24Z","lastTransitionTime":"2025-12-04T17:29:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.533436 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"431a4fcf-a830-43a4-94a0-b4c6d871d52f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5556f68691980174d165f902e2983cffe12a923d267b5615c07f9dc7da73efa0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581273bead771845e5261f066fc22bbde1f8a36db2db091b00a9e008fcb181e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://581273bead771845e5261f066fc22bbde1f8a36db2db091b00a9e008fcb181e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:24Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.548265 4631 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:24Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.560802 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf70493c-4094-4783-939d-ac61051c83ac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2232c209adbed3161e31474c06d86e5184985a392f0e7f7c115889bf80ad5266\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17ca1bf820b90d0dcab27c3f59301d671ae001940de1b634174e371a77a5208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b24dbe6d9b284dd49778fcde6a5d7b665e2fca8181a7cb11c2d14028da9fb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:24Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.573711 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:24Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.588780 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:24Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.603926 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:24Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.620762 4631 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efdd152e4738f125d721a6d044c0d96a378761e28d18f292c85706d0f3158f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:29:03Z\\\",\\\"message\\\":\\\"2025-12-04T17:28:18+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7912288c-7bc7-4aeb-8144-7db1229d544b\\\\n2025-12-04T17:28:18+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7912288c-7bc7-4aeb-8144-7db1229d544b to /host/opt/cni/bin/\\\\n2025-12-04T17:28:18Z [verbose] multus-daemon started\\\\n2025-12-04T17:28:18Z [verbose] Readiness Indicator file check\\\\n2025-12-04T17:29:03Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:29:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:24Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.636147 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.636199 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.636209 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.636222 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.636230 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:24Z","lastTransitionTime":"2025-12-04T17:29:24Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.739691 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.739742 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.739751 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.739768 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.739779 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:24Z","lastTransitionTime":"2025-12-04T17:29:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.843220 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.843300 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.843319 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.843346 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.843365 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:24Z","lastTransitionTime":"2025-12-04T17:29:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.947542 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.947613 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.947646 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.947679 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:24 crc kubenswrapper[4631]: I1204 17:29:24.947698 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:24Z","lastTransitionTime":"2025-12-04T17:29:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.036445 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.036520 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.036543 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.036577 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.036602 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:25Z","lastTransitionTime":"2025-12-04T17:29:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:25 crc kubenswrapper[4631]: E1204 17:29:25.059426 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.066164 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.066200 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.066212 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.066226 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.066238 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:25Z","lastTransitionTime":"2025-12-04T17:29:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:25 crc kubenswrapper[4631]: E1204 17:29:25.084655 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.088601 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.088644 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.088657 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.088677 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.088688 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:25Z","lastTransitionTime":"2025-12-04T17:29:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:25 crc kubenswrapper[4631]: E1204 17:29:25.101617 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.107677 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.107718 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.107732 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.107751 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.107768 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:25Z","lastTransitionTime":"2025-12-04T17:29:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:25 crc kubenswrapper[4631]: E1204 17:29:25.121460 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.125450 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.125490 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.125502 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.125518 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.125532 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:25Z","lastTransitionTime":"2025-12-04T17:29:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:25 crc kubenswrapper[4631]: E1204 17:29:25.143119 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:25Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:25 crc kubenswrapper[4631]: E1204 17:29:25.143396 4631 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.145548 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.145604 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.145621 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.145641 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.145657 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:25Z","lastTransitionTime":"2025-12-04T17:29:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.248349 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.248433 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.248449 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.248469 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.248485 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:25Z","lastTransitionTime":"2025-12-04T17:29:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.351314 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.351398 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.351413 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.351430 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.351444 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:25Z","lastTransitionTime":"2025-12-04T17:29:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.453466 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.453497 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.453504 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.453516 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.453526 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:25Z","lastTransitionTime":"2025-12-04T17:29:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.556485 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.556516 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.556525 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.556538 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.556548 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:25Z","lastTransitionTime":"2025-12-04T17:29:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.660035 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.660117 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.660142 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.660174 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.660196 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:25Z","lastTransitionTime":"2025-12-04T17:29:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.763985 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.764047 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.764066 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.764094 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.764116 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:25Z","lastTransitionTime":"2025-12-04T17:29:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.867926 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.868014 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.868035 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.868068 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.868093 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:25Z","lastTransitionTime":"2025-12-04T17:29:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.971484 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.971560 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.971579 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.971606 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:25 crc kubenswrapper[4631]: I1204 17:29:25.971626 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:25Z","lastTransitionTime":"2025-12-04T17:29:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.081200 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.082193 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.082362 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.082546 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.082682 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:26Z","lastTransitionTime":"2025-12-04T17:29:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.186684 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.186746 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.186767 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.186792 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.186811 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:26Z","lastTransitionTime":"2025-12-04T17:29:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.238542 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.238616 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.238713 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.238640 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:29:26 crc kubenswrapper[4631]: E1204 17:29:26.238899 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:29:26 crc kubenswrapper[4631]: E1204 17:29:26.239028 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:26 crc kubenswrapper[4631]: E1204 17:29:26.239142 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:26 crc kubenswrapper[4631]: E1204 17:29:26.239220 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.290456 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.290950 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.291100 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.291244 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.291365 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:26Z","lastTransitionTime":"2025-12-04T17:29:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.395010 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.395112 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.395141 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.395188 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.395223 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:26Z","lastTransitionTime":"2025-12-04T17:29:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.499099 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.499174 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.499193 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.499223 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.499243 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:26Z","lastTransitionTime":"2025-12-04T17:29:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.603051 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.603143 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.603168 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.603205 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.603232 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:26Z","lastTransitionTime":"2025-12-04T17:29:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.706274 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.706354 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.706418 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.706456 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.706479 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:26Z","lastTransitionTime":"2025-12-04T17:29:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.810922 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.810981 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.811011 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.811033 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.811043 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:26Z","lastTransitionTime":"2025-12-04T17:29:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.914304 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.914355 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.914389 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.914411 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:26 crc kubenswrapper[4631]: I1204 17:29:26.914426 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:26Z","lastTransitionTime":"2025-12-04T17:29:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.018544 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.018598 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.018608 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.018626 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.018641 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:27Z","lastTransitionTime":"2025-12-04T17:29:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.122159 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.122232 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.122252 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.122284 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.122310 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:27Z","lastTransitionTime":"2025-12-04T17:29:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.226493 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.226571 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.226596 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.226633 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.226661 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:27Z","lastTransitionTime":"2025-12-04T17:29:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.330571 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.330609 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.330618 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.330634 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.330643 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:27Z","lastTransitionTime":"2025-12-04T17:29:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.434123 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.434173 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.434206 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.434227 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.434239 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:27Z","lastTransitionTime":"2025-12-04T17:29:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.565927 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.566028 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.566047 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.566112 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.566138 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:27Z","lastTransitionTime":"2025-12-04T17:29:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.670664 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.670749 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.670769 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.670798 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.670826 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:27Z","lastTransitionTime":"2025-12-04T17:29:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.774290 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.774358 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.774408 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.774438 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.774459 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:27Z","lastTransitionTime":"2025-12-04T17:29:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.877978 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.878047 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.878066 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.878094 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.878113 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:27Z","lastTransitionTime":"2025-12-04T17:29:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.981148 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.981220 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.981240 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.981305 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:27 crc kubenswrapper[4631]: I1204 17:29:27.981328 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:27Z","lastTransitionTime":"2025-12-04T17:29:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.084961 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.085046 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.085070 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.085104 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.085130 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:28Z","lastTransitionTime":"2025-12-04T17:29:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.188177 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.189087 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.189276 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.189579 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.189709 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:28Z","lastTransitionTime":"2025-12-04T17:29:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.239113 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.239134 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:29:28 crc kubenswrapper[4631]: E1204 17:29:28.239318 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.239144 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:28 crc kubenswrapper[4631]: E1204 17:29:28.239536 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:28 crc kubenswrapper[4631]: E1204 17:29:28.239661 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.239213 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:29:28 crc kubenswrapper[4631]: E1204 17:29:28.240267 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.292986 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.293299 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.293419 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.293519 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.293664 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:28Z","lastTransitionTime":"2025-12-04T17:29:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.396941 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.397295 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.397431 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.397531 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.397733 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:28Z","lastTransitionTime":"2025-12-04T17:29:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.500565 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.500902 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.501041 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.501163 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.501290 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:28Z","lastTransitionTime":"2025-12-04T17:29:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.604512 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.604568 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.604582 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.604605 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.604621 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:28Z","lastTransitionTime":"2025-12-04T17:29:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.708812 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.708868 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.708886 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.708913 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.708931 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:28Z","lastTransitionTime":"2025-12-04T17:29:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.813254 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.813323 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.813342 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.813395 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.813417 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:28Z","lastTransitionTime":"2025-12-04T17:29:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.916528 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.916603 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.916628 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.916664 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:28 crc kubenswrapper[4631]: I1204 17:29:28.916745 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:28Z","lastTransitionTime":"2025-12-04T17:29:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.021900 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.022002 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.022025 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.022052 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.022074 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:29Z","lastTransitionTime":"2025-12-04T17:29:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.124546 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.124602 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.124614 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.124634 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.124647 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:29Z","lastTransitionTime":"2025-12-04T17:29:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.228609 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.230502 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.230752 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.230923 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.231066 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:29Z","lastTransitionTime":"2025-12-04T17:29:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.335083 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.335134 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.335152 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.335176 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.335191 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:29Z","lastTransitionTime":"2025-12-04T17:29:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.440128 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.440171 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.440183 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.440203 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.440215 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:29Z","lastTransitionTime":"2025-12-04T17:29:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.543796 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.543856 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.543874 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.543901 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.543949 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:29Z","lastTransitionTime":"2025-12-04T17:29:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.647623 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.647690 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.647709 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.647739 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.647760 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:29Z","lastTransitionTime":"2025-12-04T17:29:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.751459 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.751516 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.751528 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.751549 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.751561 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:29Z","lastTransitionTime":"2025-12-04T17:29:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.855510 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.855599 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.855613 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.855634 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.855649 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:29Z","lastTransitionTime":"2025-12-04T17:29:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.958764 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.958810 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.958823 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.958843 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:29 crc kubenswrapper[4631]: I1204 17:29:29.958853 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:29Z","lastTransitionTime":"2025-12-04T17:29:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.062412 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.062466 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.062483 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.062507 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.062525 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:30Z","lastTransitionTime":"2025-12-04T17:29:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.166654 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.166738 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.166759 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.166788 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.166809 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:30Z","lastTransitionTime":"2025-12-04T17:29:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.238698 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.238906 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:29:30 crc kubenswrapper[4631]: E1204 17:29:30.238908 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:30 crc kubenswrapper[4631]: E1204 17:29:30.238992 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.238994 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.239069 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:29:30 crc kubenswrapper[4631]: E1204 17:29:30.239139 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:30 crc kubenswrapper[4631]: E1204 17:29:30.239208 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.259025 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zllp2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03e821a0-13d4-417c-9e54-7073b08490db\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efdd152e4738f125d721a6d044c0d96a378761e28d18f292c85706d0f3158f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:29:03Z\\\",\\\"message\\\":\\\"2025-12-04T17:28:18+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7912288c-7bc7-4aeb-8144-7db1229d544b\\\\n2025-12-04T17:28:18+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7912288c-7bc7-4aeb-8144-7db1229d544b to /host/opt/cni/bin/\\\\n2025-12-04T17:28:18Z [verbose] multus-daemon started\\\\n2025-12-04T17:28:18Z [verbose] Readiness Indicator file check\\\\n2025-12-04T17:29:03Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:29:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d5rnb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zllp2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.270737 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.270792 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.270813 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.270833 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.270846 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:30Z","lastTransitionTime":"2025-12-04T17:29:30Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.271593 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"431a4fcf-a830-43a4-94a0-b4c6d871d52f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5556f68691980174d165f902e2983cffe12a923d267b5615c07f9dc7da73efa0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581273bead771845e5261f066fc22bbde1f8a36db2db091b00a9e008fcb181e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://581273bead771845e5261f066fc22bbde1f8a36db2db091b00a9e008fcb181e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-04T17:29:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.285510 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"892b8b38-5846-4b50-aebe-3f53cdd9af30\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f1d9b4c62a9b9e7a758388c697682a55e935ba52241faf6a5a31def2817eb95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e1a1c1bb6a41d132c1d5504b34005f6369e67538bf51b364bf1accbbe43b357\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758a28995138e30117bb5c83060f17187d0a9ca3f6d9e181d8cc63daf8b0d2ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.299937 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf70493c-4094-4783-939d-ac61051c83ac\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2232c209adbed3161e31474c06d86e5184985a392f0e7f7c115889bf80ad5266\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17ca1bf820b90d0dcab27c3f59301d671ae001940de1b634174e371a77a5208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3b24dbe6d9b284dd49778fcde6a5d7b665e2fca8181a7cb11c2d14028da9fb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bef5f50f9d76131366d2cfb128761b82dafa2d9e82ace869449c3c23b35dfef0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.318080 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.335453 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bf7b08d11583cfe95a77de4f19ac3df718d592c69f8edcecad92ec68fa43b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.350793 4631 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-q27wh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc938ac1-b2a3-4435-bda5-c7be66763a01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50e9bc86485c0e04bded7b9670dae6b4d81020cea8eca728644171e99d86dcbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6788\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q27wh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:30Z is after 2025-08-24T17:21:41Z" Dec 04 
17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.362049 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29a77294-77b9-4074-9cb9-e31985b67447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://712c54ce6d416f06964e2a50bf902d2c18b7bf713a9d67265bfbadc2fbf4ee6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96709e063c20b40adae03f18364ec6297f144e6454670c5ab760f150837e165\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-45vbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-88qcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.374471 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.374548 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.374568 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.374593 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.374610 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:30Z","lastTransitionTime":"2025-12-04T17:29:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.377984 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cca995e5-9ce2-4996-b131-60c489ff721e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:27:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:27:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9
f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:00Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\
\"containerID\\\":\\\"cri-o://22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:27:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:27:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:27:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.393405 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4fc3914ff5398da85d6fa71a83f22ad26fa9a7be2711310e227f1e99f2674f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1937ccdaf767eab0506126f0a288e889035fdc6d42a4cca49b985adeaa7bbb86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-id
entity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.405763 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1a26b239d4c93294e5e594dbed2567b59d08bf63525c162c2aae82d37dea85f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.427297 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de540d5a-4ce4-4960-b721-75c6d9a7a02e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba70b7c7b4490c52de142f987f90003e30447d5ce604907dccde8d3f64f39a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5952c646546fda2a6aae763b6a37811e233147e1cce5c01e4d14dab3f866b3d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa46f0fd665196141ed3d90bd15b11142e70d9608f42d2730ae148cd317dd4ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b121fd90ea02e7063287347173195425ab1d88f3ea15c9f1e0a5a1f895dd16\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://209ced1d8d2119cbba0410345e4604c64934226d84f899d14795add2797ab4a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3c5bfb1f568a154380fd11b74a30a4f8e6b84ce483c9c4a965a069ef3161dbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b87a8a12a4d0316610b21ba425418d26765ec6b589f995ffe381985c260b1c55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2xcwj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bfhhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.443343 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6ns2w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3859fd47-f59f-4401-b195-a8a9e227c472\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f66330cb22fa8a7354c2299894c369b4a6190141ae0bdbe1650f52c8e63a1268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhnkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:21Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6ns2w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.459864 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dzxft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"35aedb25-9161-4c4e-a563-097dd7c4bc7e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1034eba7fa53fbbbb86b749960d7559ecfe6bd58212b84a1b2c5dff771be2c79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-68wxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dzxft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.478045 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.478447 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.478533 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.478616 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.478682 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:30Z","lastTransitionTime":"2025-12-04T17:29:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.480297 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d617abc-dc04-4807-b684-3640cde38e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04cf802bdf1fd0d81a685ca8171bfcea89086fdd4979fcc18df7c134d482e052\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04cf802bdf1fd0d81a685ca8171bfcea89086fdd4979fcc18df7c134d482e052\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-04T17:29:22Z\\\",\\\"message\\\":\\\"led objects of type *v1.Pod\\\\nI1204 17:29:22.424260 6484 obj_retry.go:409] Going to retry *v1.Pod resource setup for 13 objects: [openshift-network-operator/iptables-alerter-4ln5h openshift-image-registry/node-ca-6ns2w openshift-multus/multus-additional-cni-plugins-bfhhc openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-ovn-kubernetes/ovnkube-node-vpgzg openshift-kube-controller-manager/kube-controller-manager-crc openshift-kube-scheduler/openshift-kube-scheduler-crc openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-network-operator/network-operator-58b4c7f79c-55gtf openshift-multus/network-metrics-daemon-8kcrj openshift-dns/node-resolver-dzxft openshift-kube-apiserver/kube-apiserver-crc openshift-machine-config-operator/kube-rbac-proxy-crio-crc]\\\\nI1204 17:29:22.424291 6484 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nF1204 17:29:22.424311 6484 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-04T17:29:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vpgzg_openshift-ovn-kubernetes(0d617abc-dc04-4807-b684-3640cde38e81)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-04T17:28:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-04T17:28:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-04T17:28:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vtfmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vpgzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.494387 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8kcrj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fm8tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-04T17:28:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8kcrj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.510617 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.525887 4631 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-04T17:28:16Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:30Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.582629 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.582673 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.582684 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.582703 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.582733 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:30Z","lastTransitionTime":"2025-12-04T17:29:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.694287 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.694330 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.694342 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.694363 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.694394 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:30Z","lastTransitionTime":"2025-12-04T17:29:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.797473 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.797537 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.797551 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.797572 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.797586 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:30Z","lastTransitionTime":"2025-12-04T17:29:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.900489 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.900537 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.900545 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.900562 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:30 crc kubenswrapper[4631]: I1204 17:29:30.900573 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:30Z","lastTransitionTime":"2025-12-04T17:29:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.003915 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.003950 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.003959 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.003974 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.003982 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:31Z","lastTransitionTime":"2025-12-04T17:29:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.107065 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.107108 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.107121 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.107189 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.107219 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:31Z","lastTransitionTime":"2025-12-04T17:29:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.210800 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.210860 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.210872 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.210892 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.210903 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:31Z","lastTransitionTime":"2025-12-04T17:29:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.314245 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.314311 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.314325 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.314349 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.314408 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:31Z","lastTransitionTime":"2025-12-04T17:29:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.417392 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.417437 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.417446 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.417465 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.417477 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:31Z","lastTransitionTime":"2025-12-04T17:29:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.520573 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.520624 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.520637 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.520654 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.520669 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:31Z","lastTransitionTime":"2025-12-04T17:29:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.624098 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.624181 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.624200 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.624230 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.624252 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:31Z","lastTransitionTime":"2025-12-04T17:29:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.728403 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.728463 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.728474 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.728498 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.728511 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:31Z","lastTransitionTime":"2025-12-04T17:29:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.832029 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.832095 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.832112 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.832137 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.832158 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:31Z","lastTransitionTime":"2025-12-04T17:29:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.935594 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.935665 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.935683 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.935715 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:31 crc kubenswrapper[4631]: I1204 17:29:31.935736 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:31Z","lastTransitionTime":"2025-12-04T17:29:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.038125 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.038191 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.038206 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.038232 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.038250 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:32Z","lastTransitionTime":"2025-12-04T17:29:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.141741 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.141790 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.141801 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.141820 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.141833 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:32Z","lastTransitionTime":"2025-12-04T17:29:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.238942 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.239091 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.239441 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:29:32 crc kubenswrapper[4631]: E1204 17:29:32.239687 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.239791 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:32 crc kubenswrapper[4631]: E1204 17:29:32.240015 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:29:32 crc kubenswrapper[4631]: E1204 17:29:32.240045 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:32 crc kubenswrapper[4631]: E1204 17:29:32.240130 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.245682 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.245927 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.246007 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.246098 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.246160 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:32Z","lastTransitionTime":"2025-12-04T17:29:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.348320 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.348358 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.348383 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.348399 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.348409 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:32Z","lastTransitionTime":"2025-12-04T17:29:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.451123 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.451191 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.451210 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.451233 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.451246 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:32Z","lastTransitionTime":"2025-12-04T17:29:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.553794 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.553825 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.553833 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.553846 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.553854 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:32Z","lastTransitionTime":"2025-12-04T17:29:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.655842 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.655910 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.655921 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.655937 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.655949 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:32Z","lastTransitionTime":"2025-12-04T17:29:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.759221 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.759548 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.759696 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.759796 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.760158 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:32Z","lastTransitionTime":"2025-12-04T17:29:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.863560 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.863937 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.864138 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.864521 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.864652 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:32Z","lastTransitionTime":"2025-12-04T17:29:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.968835 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.968906 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.968927 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.968956 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:32 crc kubenswrapper[4631]: I1204 17:29:32.968975 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:32Z","lastTransitionTime":"2025-12-04T17:29:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.072212 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.072297 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.072321 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.072356 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.072433 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:33Z","lastTransitionTime":"2025-12-04T17:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.175601 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.176043 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.176189 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.176415 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.176630 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:33Z","lastTransitionTime":"2025-12-04T17:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.280076 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.280606 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.280771 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.280930 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.281068 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:33Z","lastTransitionTime":"2025-12-04T17:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.384768 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.384825 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.384837 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.384861 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.384876 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:33Z","lastTransitionTime":"2025-12-04T17:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.488416 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.488508 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.488537 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.488571 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.488597 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:33Z","lastTransitionTime":"2025-12-04T17:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.590827 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.590883 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.590895 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.590916 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.590929 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:33Z","lastTransitionTime":"2025-12-04T17:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.693638 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.693686 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.693700 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.693722 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.693769 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:33Z","lastTransitionTime":"2025-12-04T17:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.796278 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.796342 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.796365 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.796437 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.796459 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:33Z","lastTransitionTime":"2025-12-04T17:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.899714 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.900150 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.900172 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.900197 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:33 crc kubenswrapper[4631]: I1204 17:29:33.900215 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:33Z","lastTransitionTime":"2025-12-04T17:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.004600 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.004670 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.004685 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.004729 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.004750 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:34Z","lastTransitionTime":"2025-12-04T17:29:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.107908 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.107991 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.108017 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.108053 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.108082 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:34Z","lastTransitionTime":"2025-12-04T17:29:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.211101 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.211219 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.211242 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.211265 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.211283 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:34Z","lastTransitionTime":"2025-12-04T17:29:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.238837 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.238873 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:29:34 crc kubenswrapper[4631]: E1204 17:29:34.239040 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.239120 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.239342 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:29:34 crc kubenswrapper[4631]: E1204 17:29:34.239350 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:34 crc kubenswrapper[4631]: E1204 17:29:34.239516 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:29:34 crc kubenswrapper[4631]: E1204 17:29:34.239620 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.313935 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.313987 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.314004 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.314024 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.314038 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:34Z","lastTransitionTime":"2025-12-04T17:29:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.417165 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.417235 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.417274 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.417304 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.417333 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:34Z","lastTransitionTime":"2025-12-04T17:29:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.521121 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.521172 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.521185 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.521202 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.521219 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:34Z","lastTransitionTime":"2025-12-04T17:29:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.625000 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.625072 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.625091 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.625123 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.625156 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:34Z","lastTransitionTime":"2025-12-04T17:29:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.712862 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs\") pod \"network-metrics-daemon-8kcrj\" (UID: \"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\") " pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:34 crc kubenswrapper[4631]: E1204 17:29:34.713162 4631 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 17:29:34 crc kubenswrapper[4631]: E1204 17:29:34.713299 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs podName:86903bd1-674d-4fa2-b9d1-dbc8f347b72b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:38.713267964 +0000 UTC m=+168.745509992 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs") pod "network-metrics-daemon-8kcrj" (UID: "86903bd1-674d-4fa2-b9d1-dbc8f347b72b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.728588 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.728652 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.728672 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.728697 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.728715 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:34Z","lastTransitionTime":"2025-12-04T17:29:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.831977 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.832029 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.832040 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.832058 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.832070 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:34Z","lastTransitionTime":"2025-12-04T17:29:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.934181 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.934261 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.934287 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.934317 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:34 crc kubenswrapper[4631]: I1204 17:29:34.934344 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:34Z","lastTransitionTime":"2025-12-04T17:29:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.037634 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.037684 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.037698 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.037720 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.037734 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:35Z","lastTransitionTime":"2025-12-04T17:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.140836 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.140884 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.140897 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.140915 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.140926 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:35Z","lastTransitionTime":"2025-12-04T17:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.243218 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.243273 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.243289 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.243307 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.243320 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:35Z","lastTransitionTime":"2025-12-04T17:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.253279 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.253332 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.253353 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.253410 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.253431 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:35Z","lastTransitionTime":"2025-12-04T17:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:35 crc kubenswrapper[4631]: E1204 17:29:35.268810 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:35Z is after 2025-08-24T17:21:41Z"
Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.273008 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.273056 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.273067 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.273079 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.273088 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:35Z","lastTransitionTime":"2025-12-04T17:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:35 crc kubenswrapper[4631]: E1204 17:29:35.285775 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:35Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.289287 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.289311 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
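
Each retry in this block fails identically: the TLS handshake with the node.network-node-identity.openshift.io admission webhook at https://127.0.0.1:9743 is rejected because the serving certificate expired at 2025-08-24T17:21:41Z while the node clock reads 2025-12-04, so no status patch can be admitted. A minimal sketch to confirm the certificate's validity window from the node itself; it assumes Python 3 with the third-party cryptography package (version 42+ for the _utc accessors) and takes host and port from the webhook URL in the log:

# cert_check.py - print the validity window of the certificate served on
# 127.0.0.1:9743, the webhook endpoint named in the errors above.
import socket
import ssl

from cryptography import x509

HOST, PORT = "127.0.0.1", 9743  # from the webhook URL in the log

# Verification is disabled on purpose: the point is to inspect the
# expired certificate, not to validate it.
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE

with socket.create_connection((HOST, PORT), timeout=10) as sock:
    with ctx.wrap_socket(sock, server_hostname=HOST) as tls:
        der = tls.getpeercert(binary_form=True)  # raw DER bytes

cert = x509.load_der_x509_certificate(der)
print("notBefore:", cert.not_valid_before_utc)
print("notAfter: ", cert.not_valid_after_utc)  # log says 2025-08-24T17:21:41Z

A notAfter in the past confirms the webhook certificate, not the kubelet, is the blocker; with CRC this pattern typically shows up when the cluster image predates the host clock, and it clears once the cluster's internal certificate rotation catches up.
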
event="NodeHasNoDiskPressure" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.289319 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.289331 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.289339 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:35Z","lastTransitionTime":"2025-12-04T17:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:35 crc kubenswrapper[4631]: E1204 17:29:35.301712 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:35Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.305997 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.306035 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
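
The other failure woven through these entries is the Ready=False condition itself: the runtime reports NetworkReady=false because /etc/kubernetes/cni/net.d/ contains no CNI configuration yet (the network provider, OVN-Kubernetes judging by the node-identity webhook, writes it once its own pods are up), which is also why pods that need a new sandbox are skipped with "network is not ready" further down. The runtime's readiness probe amounts to "is there at least one parseable CNI config file in that directory"; a sketch of the equivalent check, with the path taken from the log and the extension set assumed from CNI convention:

# cni_ready.py - approximate the container runtime's network-readiness
# probe: is there at least one usable CNI configuration file in the
# directory the kubelet error message points at?
import json
import pathlib

CNI_CONF_DIR = pathlib.Path("/etc/kubernetes/cni/net.d")  # from the log
EXTENSIONS = {".conf", ".conflist", ".json"}  # conventional CNI names

def network_ready() -> bool:
    if not CNI_CONF_DIR.is_dir():
        return False
    for path in sorted(CNI_CONF_DIR.iterdir()):
        if path.suffix not in EXTENSIONS:
            continue
        try:
            json.loads(path.read_text())  # must at least parse as JSON
        except (OSError, ValueError):
            continue  # unreadable or malformed: keep looking
        return True
    return False

if __name__ == "__main__":
    print("NetworkReady:", network_ready())
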
event="NodeHasNoDiskPressure" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.306046 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.306062 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.306074 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:35Z","lastTransitionTime":"2025-12-04T17:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:35 crc kubenswrapper[4631]: E1204 17:29:35.316866 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:35Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.320260 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.320295 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
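
The attempt that follows is the last one in this cycle: the kubelet caps node-status updates at a fixed number of tries per sync (nodeStatusUpdateRetry = 5 in the upstream kubelet sources, matching the five failed patches in this stretch of the log) and then logs "update node status exceeds retry count", as seen below, deferring further attempts to the next sync period. A sketch of that control flow; the Python names are illustrative stand-ins for the kubelet's Go implementation:

# retry_loop.py - the shape of the kubelet's node-status update cycle as
# it appears in this log: a bounded number of patch attempts, then give
# up until the next sync. Names are illustrative, not the kubelet's own.
NODE_STATUS_UPDATE_RETRY = 5  # upstream kubelet constant nodeStatusUpdateRetry

def try_patch_node_status() -> bool:
    """Stand-in for the PATCH request; here it always fails because the
    admission webhook's serving certificate has expired."""
    return False

def update_node_status() -> None:
    for attempt in range(1, NODE_STATUS_UPDATE_RETRY + 1):
        if try_patch_node_status():
            return  # patched successfully
        print(f"Error updating node status, will retry (attempt {attempt})")
    print('Unable to update node status: "update node status exceeds retry count"')

update_node_status()
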
event="NodeHasNoDiskPressure" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.320325 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.320342 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.320354 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:35Z","lastTransitionTime":"2025-12-04T17:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:35 crc kubenswrapper[4631]: E1204 17:29:35.331779 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-04T17:29:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"45733624-1f47-45a2-a6e9-c03f47562993\\\",\\\"systemUUID\\\":\\\"c04d3b5b-9d92-423d-a922-0c9769c3a8b4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-04T17:29:35Z is after 2025-08-24T17:21:41Z" Dec 04 17:29:35 crc kubenswrapper[4631]: E1204 17:29:35.331923 4631 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.345288 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.345339 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.345348 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.345404 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.345417 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:35Z","lastTransitionTime":"2025-12-04T17:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.447984 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.448023 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.448033 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.448051 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.448061 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:35Z","lastTransitionTime":"2025-12-04T17:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.550764 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.550815 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.550828 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.550852 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.550865 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:35Z","lastTransitionTime":"2025-12-04T17:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.654206 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.654263 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.654278 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.654294 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.654305 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:35Z","lastTransitionTime":"2025-12-04T17:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.756632 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.756694 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.756725 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.756740 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.756766 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:35Z","lastTransitionTime":"2025-12-04T17:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.859833 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.859890 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.859907 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.859927 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.859943 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:35Z","lastTransitionTime":"2025-12-04T17:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.963103 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.963152 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.963163 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.963178 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:35 crc kubenswrapper[4631]: I1204 17:29:35.963188 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:35Z","lastTransitionTime":"2025-12-04T17:29:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.066320 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.066356 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.066387 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.066405 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.066420 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:36Z","lastTransitionTime":"2025-12-04T17:29:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.169429 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.169481 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.169497 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.169521 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.169539 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:36Z","lastTransitionTime":"2025-12-04T17:29:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.238639 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.238758 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.238778 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:29:36 crc kubenswrapper[4631]: E1204 17:29:36.238957 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.239001 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:29:36 crc kubenswrapper[4631]: E1204 17:29:36.239097 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:36 crc kubenswrapper[4631]: E1204 17:29:36.239317 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:36 crc kubenswrapper[4631]: E1204 17:29:36.239363 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.272528 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.272593 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.272604 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.272618 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.272632 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:36Z","lastTransitionTime":"2025-12-04T17:29:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.376103 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.376162 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.376223 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.376254 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.376281 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:36Z","lastTransitionTime":"2025-12-04T17:29:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.481159 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.481220 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.481240 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.481264 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.481282 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:36Z","lastTransitionTime":"2025-12-04T17:29:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.584535 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.584585 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.584597 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.584613 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.584626 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:36Z","lastTransitionTime":"2025-12-04T17:29:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.687193 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.687277 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.687292 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.687311 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.687325 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:36Z","lastTransitionTime":"2025-12-04T17:29:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.789958 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.789998 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.790009 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.790025 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.790036 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:36Z","lastTransitionTime":"2025-12-04T17:29:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.892336 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.892413 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.892424 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.892438 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.892450 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:36Z","lastTransitionTime":"2025-12-04T17:29:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.994837 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.994897 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.994913 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.994934 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:36 crc kubenswrapper[4631]: I1204 17:29:36.994947 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:36Z","lastTransitionTime":"2025-12-04T17:29:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.097956 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.098008 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.098024 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.098043 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.098056 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:37Z","lastTransitionTime":"2025-12-04T17:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.201558 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.201615 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.201631 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.201654 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.201676 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:37Z","lastTransitionTime":"2025-12-04T17:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.304481 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.304550 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.304571 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.304595 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.304615 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:37Z","lastTransitionTime":"2025-12-04T17:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.407538 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.407600 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.407616 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.407640 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.407659 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:37Z","lastTransitionTime":"2025-12-04T17:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.510695 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.510754 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.510773 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.510795 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.510813 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:37Z","lastTransitionTime":"2025-12-04T17:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.612945 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.612989 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.612999 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.613014 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.613025 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:37Z","lastTransitionTime":"2025-12-04T17:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.734192 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.734240 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.734254 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.734271 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.734283 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:37Z","lastTransitionTime":"2025-12-04T17:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.837156 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.837217 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.837233 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.837258 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.837272 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:37Z","lastTransitionTime":"2025-12-04T17:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.940675 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.940736 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.940748 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.940764 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:37 crc kubenswrapper[4631]: I1204 17:29:37.940777 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:37Z","lastTransitionTime":"2025-12-04T17:29:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.043987 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.044069 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.044085 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.044105 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.044147 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:38Z","lastTransitionTime":"2025-12-04T17:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.146745 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.146789 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.146800 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.146816 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.146863 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:38Z","lastTransitionTime":"2025-12-04T17:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.239481 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.239528 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.239647 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.239696 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 17:29:38 crc kubenswrapper[4631]: E1204 17:29:38.239841 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 04 17:29:38 crc kubenswrapper[4631]: E1204 17:29:38.239942 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 04 17:29:38 crc kubenswrapper[4631]: E1204 17:29:38.240467 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 04 17:29:38 crc kubenswrapper[4631]: E1204 17:29:38.241079 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.241518 4631 scope.go:117] "RemoveContainer" containerID="04cf802bdf1fd0d81a685ca8171bfcea89086fdd4979fcc18df7c134d482e052"
Dec 04 17:29:38 crc kubenswrapper[4631]: E1204 17:29:38.241817 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-vpgzg_openshift-ovn-kubernetes(0d617abc-dc04-4807-b684-3640cde38e81)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" podUID="0d617abc-dc04-4807-b684-3640cde38e81"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.249195 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.249259 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.249276 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.249302 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.249319 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:38Z","lastTransitionTime":"2025-12-04T17:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.352509 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.352582 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.352596 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.352615 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.352652 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:38Z","lastTransitionTime":"2025-12-04T17:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.456073 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.456145 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.456161 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.456187 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.456211 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:38Z","lastTransitionTime":"2025-12-04T17:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.559431 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.559495 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.559510 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.559539 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.559555 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:38Z","lastTransitionTime":"2025-12-04T17:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.662650 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.662691 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.662702 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.662719 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.662731 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:38Z","lastTransitionTime":"2025-12-04T17:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.766145 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.766214 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.766232 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.766267 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.766288 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:38Z","lastTransitionTime":"2025-12-04T17:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.868966 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.869016 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.869026 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.869045 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.869058 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:38Z","lastTransitionTime":"2025-12-04T17:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.970869 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.970922 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.970932 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.970984 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:38 crc kubenswrapper[4631]: I1204 17:29:38.971001 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:38Z","lastTransitionTime":"2025-12-04T17:29:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.073545 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.073598 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.073615 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.073638 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.073655 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:39Z","lastTransitionTime":"2025-12-04T17:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.176352 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.176422 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.176434 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.176447 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.176456 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:39Z","lastTransitionTime":"2025-12-04T17:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.257338 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"]
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.278897 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.278962 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.278980 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.279006 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.279025 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:39Z","lastTransitionTime":"2025-12-04T17:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.384011 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.384086 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.384098 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.384116 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.384131 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:39Z","lastTransitionTime":"2025-12-04T17:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.486978 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.487022 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.487034 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.487053 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.487075 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:39Z","lastTransitionTime":"2025-12-04T17:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.591055 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.591124 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.591143 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.591172 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.591199 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:39Z","lastTransitionTime":"2025-12-04T17:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.694498 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.694542 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.694558 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.694582 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.694599 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:39Z","lastTransitionTime":"2025-12-04T17:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.799428 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.799474 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.799483 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.799500 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.799510 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:39Z","lastTransitionTime":"2025-12-04T17:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.902299 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.902345 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.902356 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.902398 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:39 crc kubenswrapper[4631]: I1204 17:29:39.902409 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:39Z","lastTransitionTime":"2025-12-04T17:29:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.005503 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.005559 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.005570 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.005588 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.005600 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:40Z","lastTransitionTime":"2025-12-04T17:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.108004 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.108073 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.108085 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.108107 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.108122 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:40Z","lastTransitionTime":"2025-12-04T17:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.210960 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.211025 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.211036 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.211052 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.211066 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:40Z","lastTransitionTime":"2025-12-04T17:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.238640 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.238737 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 17:29:40 crc kubenswrapper[4631]: E1204 17:29:40.238770 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 04 17:29:40 crc kubenswrapper[4631]: E1204 17:29:40.238974 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.239100 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.239219 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 17:29:40 crc kubenswrapper[4631]: E1204 17:29:40.239501 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 04 17:29:40 crc kubenswrapper[4631]: E1204 17:29:40.239638 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b"
pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.283544 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-bfhhc" podStartSLOduration=84.283508298 podStartE2EDuration="1m24.283508298s" podCreationTimestamp="2025-12-04 17:28:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:29:40.267779947 +0000 UTC m=+110.300021965" watchObservedRunningTime="2025-12-04 17:29:40.283508298 +0000 UTC m=+110.315750316" Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.303701 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-6ns2w" podStartSLOduration=83.303671255 podStartE2EDuration="1m23.303671255s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:29:40.285242167 +0000 UTC m=+110.317484195" watchObservedRunningTime="2025-12-04 17:29:40.303671255 +0000 UTC m=+110.335913263" Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.315830 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.315884 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.315902 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.315929 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.315949 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:40Z","lastTransitionTime":"2025-12-04T17:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.338499 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-88qcn" podStartSLOduration=83.338473422 podStartE2EDuration="1m23.338473422s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:29:40.305633831 +0000 UTC m=+110.337875849" watchObservedRunningTime="2025-12-04 17:29:40.338473422 +0000 UTC m=+110.370715430" Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.354693 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=1.354671956 podStartE2EDuration="1.354671956s" podCreationTimestamp="2025-12-04 17:29:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:29:40.338920725 +0000 UTC m=+110.371162743" watchObservedRunningTime="2025-12-04 17:29:40.354671956 +0000 UTC m=+110.386913954" Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.371246 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=84.37122156 podStartE2EDuration="1m24.37122156s" podCreationTimestamp="2025-12-04 17:28:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:29:40.354540532 +0000 UTC m=+110.386782550" watchObservedRunningTime="2025-12-04 17:29:40.37122156 +0000 UTC m=+110.403463558" Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.418034 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.418087 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.418099 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.418121 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.418137 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:40Z","lastTransitionTime":"2025-12-04T17:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.425211 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-dzxft" podStartSLOduration=84.425193316 podStartE2EDuration="1m24.425193316s" podCreationTimestamp="2025-12-04 17:28:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:29:40.399031697 +0000 UTC m=+110.431273695" watchObservedRunningTime="2025-12-04 17:29:40.425193316 +0000 UTC m=+110.457435314" Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.521423 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.521494 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.521507 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.521526 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.521548 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:40Z","lastTransitionTime":"2025-12-04T17:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.525735 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podStartSLOduration=84.525706445 podStartE2EDuration="1m24.525706445s" podCreationTimestamp="2025-12-04 17:28:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:29:40.525125538 +0000 UTC m=+110.557367536" watchObservedRunningTime="2025-12-04 17:29:40.525706445 +0000 UTC m=+110.557948443" Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.549281 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-zllp2" podStartSLOduration=84.54925133 podStartE2EDuration="1m24.54925133s" podCreationTimestamp="2025-12-04 17:28:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:29:40.548735235 +0000 UTC m=+110.580977233" watchObservedRunningTime="2025-12-04 17:29:40.54925133 +0000 UTC m=+110.581493338" Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.580054 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=32.580027801 podStartE2EDuration="32.580027801s" podCreationTimestamp="2025-12-04 17:29:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:29:40.56673511 +0000 UTC m=+110.598977108" watchObservedRunningTime="2025-12-04 17:29:40.580027801 +0000 UTC m=+110.612269799" Dec 04 17:29:40 crc 
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.580421 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=84.580413542 podStartE2EDuration="1m24.580413542s" podCreationTimestamp="2025-12-04 17:28:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:29:40.579669541 +0000 UTC m=+110.611911559" watchObservedRunningTime="2025-12-04 17:29:40.580413542 +0000 UTC m=+110.612655540"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.592905 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=47.592881169 podStartE2EDuration="47.592881169s" podCreationTimestamp="2025-12-04 17:28:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:29:40.592421526 +0000 UTC m=+110.624663524" watchObservedRunningTime="2025-12-04 17:29:40.592881169 +0000 UTC m=+110.625123177"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.624529 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.624589 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.624603 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.624621 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.624632 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:40Z","lastTransitionTime":"2025-12-04T17:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.727483 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.727552 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.727563 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.727583 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.727594 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:40Z","lastTransitionTime":"2025-12-04T17:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.831259 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.831338 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.831357 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.831420 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.831444 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:40Z","lastTransitionTime":"2025-12-04T17:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.934717 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.934872 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.934904 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.934981 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:40 crc kubenswrapper[4631]: I1204 17:29:40.935010 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:40Z","lastTransitionTime":"2025-12-04T17:29:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.038746 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.038815 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.038828 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.038851 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.038866 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:41Z","lastTransitionTime":"2025-12-04T17:29:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.141838 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.141902 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.141919 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.141945 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.141963 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:41Z","lastTransitionTime":"2025-12-04T17:29:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.245500 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.245608 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.245627 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.245651 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.245671 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:41Z","lastTransitionTime":"2025-12-04T17:29:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.349449 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.349536 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.349556 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.349579 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.349628 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:41Z","lastTransitionTime":"2025-12-04T17:29:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.451776 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.451808 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.451817 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.451830 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.451838 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:41Z","lastTransitionTime":"2025-12-04T17:29:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.554983 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.555067 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.555088 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.555118 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.555136 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:41Z","lastTransitionTime":"2025-12-04T17:29:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.657445 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.657482 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.657491 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.657506 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.657517 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:41Z","lastTransitionTime":"2025-12-04T17:29:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.760159 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.760202 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.760212 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.760226 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.760236 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:41Z","lastTransitionTime":"2025-12-04T17:29:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.863333 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.863390 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.863404 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.863421 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.863433 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:41Z","lastTransitionTime":"2025-12-04T17:29:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.966405 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.966467 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.966486 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.966505 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:41 crc kubenswrapper[4631]: I1204 17:29:41.966519 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:41Z","lastTransitionTime":"2025-12-04T17:29:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.069634 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.069682 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.069692 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.069709 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.069720 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:42Z","lastTransitionTime":"2025-12-04T17:29:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.173081 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.173391 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.173404 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.173421 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.173431 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:42Z","lastTransitionTime":"2025-12-04T17:29:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.239749 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.240122 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 17:29:42 crc kubenswrapper[4631]: E1204 17:29:42.240146 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 04 17:29:42 crc kubenswrapper[4631]: E1204 17:29:42.240322 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.239841 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 17:29:42 crc kubenswrapper[4631]: E1204 17:29:42.240561 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.239756 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj"
Dec 04 17:29:42 crc kubenswrapper[4631]: E1204 17:29:42.240696 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.277278 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.277347 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.277408 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.277448 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.277483 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:42Z","lastTransitionTime":"2025-12-04T17:29:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.379744 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.379813 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.379836 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.379864 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.379890 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:42Z","lastTransitionTime":"2025-12-04T17:29:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.482679 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.482721 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.482731 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.482748 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.482759 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:42Z","lastTransitionTime":"2025-12-04T17:29:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.585347 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.585448 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.585472 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.585500 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.585519 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:42Z","lastTransitionTime":"2025-12-04T17:29:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.693786 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.693840 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.693855 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.693878 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.693895 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:42Z","lastTransitionTime":"2025-12-04T17:29:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.820891 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.820959 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.820983 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.821013 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.821039 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:42Z","lastTransitionTime":"2025-12-04T17:29:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.923586 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.923627 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.923636 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.923649 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:42 crc kubenswrapper[4631]: I1204 17:29:42.923658 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:42Z","lastTransitionTime":"2025-12-04T17:29:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.026233 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.026301 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.026318 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.026346 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.026366 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:43Z","lastTransitionTime":"2025-12-04T17:29:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.129714 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.129760 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.129770 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.129785 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.129797 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:43Z","lastTransitionTime":"2025-12-04T17:29:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.232286 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.232323 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.232331 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.232346 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.232355 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:43Z","lastTransitionTime":"2025-12-04T17:29:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.334320 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.334406 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.334422 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.334443 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.334458 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:43Z","lastTransitionTime":"2025-12-04T17:29:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.437447 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.437555 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.437572 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.437594 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.437610 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:43Z","lastTransitionTime":"2025-12-04T17:29:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.539703 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.539754 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.539765 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.539780 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.539793 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:43Z","lastTransitionTime":"2025-12-04T17:29:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.643353 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.643419 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.643431 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.643448 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.643460 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:43Z","lastTransitionTime":"2025-12-04T17:29:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.746559 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.747122 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.747147 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.747167 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.747181 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:43Z","lastTransitionTime":"2025-12-04T17:29:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.849160 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.849205 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.849216 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.849234 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.849248 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:43Z","lastTransitionTime":"2025-12-04T17:29:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.951280 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.951415 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.951425 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.951454 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:43 crc kubenswrapper[4631]: I1204 17:29:43.951467 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:43Z","lastTransitionTime":"2025-12-04T17:29:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.054137 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.054204 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.054218 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.054233 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.054244 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:44Z","lastTransitionTime":"2025-12-04T17:29:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.157249 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.157336 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.157354 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.157427 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.157442 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:44Z","lastTransitionTime":"2025-12-04T17:29:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.238581 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj"
Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.238637 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.238994 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 04 17:29:44 crc kubenswrapper[4631]: E1204 17:29:44.238989 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b"
Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.239078 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 04 17:29:44 crc kubenswrapper[4631]: E1204 17:29:44.239192 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 04 17:29:44 crc kubenswrapper[4631]: E1204 17:29:44.239459 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 04 17:29:44 crc kubenswrapper[4631]: E1204 17:29:44.239828 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.260177 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.260232 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.260247 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.260274 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.260290 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:44Z","lastTransitionTime":"2025-12-04T17:29:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.363315 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.363407 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.363420 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.363458 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 04 17:29:44 crc kubenswrapper[4631]: I1204 17:29:44.363473 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:44Z","lastTransitionTime":"2025-12-04T17:29:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.663584 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.664017 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.664175 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.664393 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.664560 4631 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-04T17:29:45Z","lastTransitionTime":"2025-12-04T17:29:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.728319 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-qjgg7"]
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.728874 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qjgg7"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.730963 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.734637 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.734777 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.740023 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.754197 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a4981cd0-a3e1-48a0-82cb-369d6ead7921-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-qjgg7\" (UID: \"a4981cd0-a3e1-48a0-82cb-369d6ead7921\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qjgg7"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.754264 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a4981cd0-a3e1-48a0-82cb-369d6ead7921-service-ca\") pod \"cluster-version-operator-5c965bbfc6-qjgg7\" (UID: \"a4981cd0-a3e1-48a0-82cb-369d6ead7921\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qjgg7"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.754294 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a4981cd0-a3e1-48a0-82cb-369d6ead7921-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-qjgg7\" (UID: \"a4981cd0-a3e1-48a0-82cb-369d6ead7921\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qjgg7"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.754354 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a4981cd0-a3e1-48a0-82cb-369d6ead7921-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-qjgg7\" (UID: \"a4981cd0-a3e1-48a0-82cb-369d6ead7921\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qjgg7"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.754420 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a4981cd0-a3e1-48a0-82cb-369d6ead7921-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-qjgg7\" (UID: \"a4981cd0-a3e1-48a0-82cb-369d6ead7921\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qjgg7"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.856086 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a4981cd0-a3e1-48a0-82cb-369d6ead7921-service-ca\") pod \"cluster-version-operator-5c965bbfc6-qjgg7\" (UID: \"a4981cd0-a3e1-48a0-82cb-369d6ead7921\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qjgg7"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.857070 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a4981cd0-a3e1-48a0-82cb-369d6ead7921-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-qjgg7\" (UID: \"a4981cd0-a3e1-48a0-82cb-369d6ead7921\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qjgg7"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.857244 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a4981cd0-a3e1-48a0-82cb-369d6ead7921-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-qjgg7\" (UID: \"a4981cd0-a3e1-48a0-82cb-369d6ead7921\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qjgg7"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.857585 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a4981cd0-a3e1-48a0-82cb-369d6ead7921-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-qjgg7\" (UID: \"a4981cd0-a3e1-48a0-82cb-369d6ead7921\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qjgg7"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.857839 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a4981cd0-a3e1-48a0-82cb-369d6ead7921-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-qjgg7\" (UID: \"a4981cd0-a3e1-48a0-82cb-369d6ead7921\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qjgg7"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.857664 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a4981cd0-a3e1-48a0-82cb-369d6ead7921-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-qjgg7\" (UID: \"a4981cd0-a3e1-48a0-82cb-369d6ead7921\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qjgg7"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.857420 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a4981cd0-a3e1-48a0-82cb-369d6ead7921-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-qjgg7\" (UID: \"a4981cd0-a3e1-48a0-82cb-369d6ead7921\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qjgg7"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.858060 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a4981cd0-a3e1-48a0-82cb-369d6ead7921-service-ca\") pod \"cluster-version-operator-5c965bbfc6-qjgg7\" (UID: \"a4981cd0-a3e1-48a0-82cb-369d6ead7921\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qjgg7"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.867710 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a4981cd0-a3e1-48a0-82cb-369d6ead7921-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-qjgg7\" (UID: \"a4981cd0-a3e1-48a0-82cb-369d6ead7921\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qjgg7"
Dec 04 17:29:45 crc kubenswrapper[4631]: I1204 17:29:45.880062 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a4981cd0-a3e1-48a0-82cb-369d6ead7921-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-qjgg7\" (UID: \"a4981cd0-a3e1-48a0-82cb-369d6ead7921\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qjgg7"
Dec 04 17:29:46 crc kubenswrapper[4631]: I1204 17:29:46.053672 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qjgg7"
Dec 04 17:29:46 crc kubenswrapper[4631]: W1204 17:29:46.080531 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4981cd0_a3e1_48a0_82cb_369d6ead7921.slice/crio-56aa5b3976d5887476be9a54ef48ed02e8ef1610b21903f9bfb2127cd5420eb8 WatchSource:0}: Error finding container 56aa5b3976d5887476be9a54ef48ed02e8ef1610b21903f9bfb2127cd5420eb8: Status 404 returned error can't find the container with id 56aa5b3976d5887476be9a54ef48ed02e8ef1610b21903f9bfb2127cd5420eb8
pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:46 crc kubenswrapper[4631]: E1204 17:29:46.239644 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:46 crc kubenswrapper[4631]: I1204 17:29:46.239720 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:29:46 crc kubenswrapper[4631]: E1204 17:29:46.240484 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:46 crc kubenswrapper[4631]: E1204 17:29:46.240614 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:29:46 crc kubenswrapper[4631]: I1204 17:29:46.391837 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qjgg7" event={"ID":"a4981cd0-a3e1-48a0-82cb-369d6ead7921","Type":"ContainerStarted","Data":"b2fbaf8869d9a2c6a71976433efcf2d1558e347a8fb269929f96ab33e6ba0b12"} Dec 04 17:29:46 crc kubenswrapper[4631]: I1204 17:29:46.391897 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qjgg7" event={"ID":"a4981cd0-a3e1-48a0-82cb-369d6ead7921","Type":"ContainerStarted","Data":"56aa5b3976d5887476be9a54ef48ed02e8ef1610b21903f9bfb2127cd5420eb8"} Dec 04 17:29:47 crc kubenswrapper[4631]: I1204 17:29:47.429537 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-qjgg7" podStartSLOduration=91.429515917 podStartE2EDuration="1m31.429515917s" podCreationTimestamp="2025-12-04 17:28:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:29:47.428646912 +0000 UTC m=+117.460888940" watchObservedRunningTime="2025-12-04 17:29:47.429515917 +0000 UTC m=+117.461757925" Dec 04 17:29:48 crc kubenswrapper[4631]: I1204 17:29:48.242688 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:29:48 crc kubenswrapper[4631]: E1204 17:29:48.242901 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:29:48 crc kubenswrapper[4631]: I1204 17:29:48.243258 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:48 crc kubenswrapper[4631]: E1204 17:29:48.243662 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:48 crc kubenswrapper[4631]: I1204 17:29:48.243892 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:29:48 crc kubenswrapper[4631]: I1204 17:29:48.243831 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:29:48 crc kubenswrapper[4631]: E1204 17:29:48.244163 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:48 crc kubenswrapper[4631]: E1204 17:29:48.244548 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:49 crc kubenswrapper[4631]: I1204 17:29:49.239529 4631 scope.go:117] "RemoveContainer" containerID="04cf802bdf1fd0d81a685ca8171bfcea89086fdd4979fcc18df7c134d482e052" Dec 04 17:29:49 crc kubenswrapper[4631]: E1204 17:29:49.240478 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-vpgzg_openshift-ovn-kubernetes(0d617abc-dc04-4807-b684-3640cde38e81)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" podUID="0d617abc-dc04-4807-b684-3640cde38e81" Dec 04 17:29:50 crc kubenswrapper[4631]: E1204 17:29:50.230807 4631 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 04 17:29:50 crc kubenswrapper[4631]: I1204 17:29:50.239451 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:29:50 crc kubenswrapper[4631]: I1204 17:29:50.239703 4631 util.go:30] "No sandbox for pod can be found. 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:50 crc kubenswrapper[4631]: I1204 17:29:50.418046 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zllp2_03e821a0-13d4-417c-9e54-7073b08490db/kube-multus/1.log" Dec 04 17:29:50 crc kubenswrapper[4631]: I1204 17:29:50.418463 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zllp2_03e821a0-13d4-417c-9e54-7073b08490db/kube-multus/0.log" Dec 04 17:29:50 crc kubenswrapper[4631]: I1204 17:29:50.418496 4631 generic.go:334] "Generic (PLEG): container finished" podID="03e821a0-13d4-417c-9e54-7073b08490db" containerID="efdd152e4738f125d721a6d044c0d96a378761e28d18f292c85706d0f3158f4f" exitCode=1 Dec 04 17:29:50 crc kubenswrapper[4631]: I1204 17:29:50.418522 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zllp2" event={"ID":"03e821a0-13d4-417c-9e54-7073b08490db","Type":"ContainerDied","Data":"efdd152e4738f125d721a6d044c0d96a378761e28d18f292c85706d0f3158f4f"} Dec 04 17:29:50 crc kubenswrapper[4631]: I1204 17:29:50.418557 4631 scope.go:117] "RemoveContainer" containerID="690e911250eec94a7ede711020c6b119bb20adf528d8fddff5610579022f28aa" Dec 04 17:29:50 crc kubenswrapper[4631]: I1204 17:29:50.418889 4631 scope.go:117] "RemoveContainer" containerID="efdd152e4738f125d721a6d044c0d96a378761e28d18f292c85706d0f3158f4f" Dec 04 17:29:50 crc kubenswrapper[4631]: E1204 17:29:50.419016 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-zllp2_openshift-multus(03e821a0-13d4-417c-9e54-7073b08490db)\"" pod="openshift-multus/multus-zllp2" podUID="03e821a0-13d4-417c-9e54-7073b08490db" Dec 04 17:29:51 crc kubenswrapper[4631]: I1204 17:29:51.421750 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zllp2_03e821a0-13d4-417c-9e54-7073b08490db/kube-multus/1.log" Dec 04 17:29:51 crc kubenswrapper[4631]: E1204 17:29:51.647477 4631 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 17:29:52 crc kubenswrapper[4631]: I1204 17:29:52.241019 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:52 crc kubenswrapper[4631]: E1204 17:29:52.241157 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:52 crc kubenswrapper[4631]: I1204 17:29:52.241338 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:29:52 crc kubenswrapper[4631]: E1204 17:29:52.241416 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:29:52 crc kubenswrapper[4631]: I1204 17:29:52.241551 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:29:52 crc kubenswrapper[4631]: E1204 17:29:52.241616 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:52 crc kubenswrapper[4631]: I1204 17:29:52.242033 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:29:52 crc kubenswrapper[4631]: E1204 17:29:52.242102 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:54 crc kubenswrapper[4631]: I1204 17:29:54.239693 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:54 crc kubenswrapper[4631]: I1204 17:29:54.239705 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:29:54 crc kubenswrapper[4631]: E1204 17:29:54.240011 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:54 crc kubenswrapper[4631]: I1204 17:29:54.239736 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:29:54 crc kubenswrapper[4631]: E1204 17:29:54.240116 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:29:54 crc kubenswrapper[4631]: I1204 17:29:54.239775 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:29:54 crc kubenswrapper[4631]: E1204 17:29:54.240228 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:54 crc kubenswrapper[4631]: E1204 17:29:54.240299 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:56 crc kubenswrapper[4631]: I1204 17:29:56.238361 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:29:56 crc kubenswrapper[4631]: I1204 17:29:56.238553 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:56 crc kubenswrapper[4631]: I1204 17:29:56.238550 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:29:56 crc kubenswrapper[4631]: I1204 17:29:56.238592 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:29:56 crc kubenswrapper[4631]: E1204 17:29:56.240128 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:29:56 crc kubenswrapper[4631]: E1204 17:29:56.240463 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:56 crc kubenswrapper[4631]: E1204 17:29:56.240541 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:56 crc kubenswrapper[4631]: E1204 17:29:56.240659 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:29:56 crc kubenswrapper[4631]: E1204 17:29:56.648881 4631 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 17:29:58 crc kubenswrapper[4631]: I1204 17:29:58.239434 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:29:58 crc kubenswrapper[4631]: I1204 17:29:58.239491 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:29:58 crc kubenswrapper[4631]: E1204 17:29:58.240726 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:29:58 crc kubenswrapper[4631]: I1204 17:29:58.239576 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:29:58 crc kubenswrapper[4631]: E1204 17:29:58.240833 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:29:58 crc kubenswrapper[4631]: I1204 17:29:58.239513 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:29:58 crc kubenswrapper[4631]: E1204 17:29:58.240929 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:29:58 crc kubenswrapper[4631]: E1204 17:29:58.241022 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:30:00 crc kubenswrapper[4631]: I1204 17:30:00.238434 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:30:00 crc kubenswrapper[4631]: I1204 17:30:00.238546 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:30:00 crc kubenswrapper[4631]: I1204 17:30:00.239717 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:30:00 crc kubenswrapper[4631]: I1204 17:30:00.240407 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:30:00 crc kubenswrapper[4631]: E1204 17:30:00.240363 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:30:00 crc kubenswrapper[4631]: E1204 17:30:00.240556 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:30:00 crc kubenswrapper[4631]: E1204 17:30:00.240635 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:30:00 crc kubenswrapper[4631]: E1204 17:30:00.240714 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:30:01 crc kubenswrapper[4631]: E1204 17:30:01.649447 4631 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 17:30:02 crc kubenswrapper[4631]: I1204 17:30:02.238884 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:30:02 crc kubenswrapper[4631]: I1204 17:30:02.238914 4631 util.go:30] "No sandbox for pod can be found. 
Dec 04 17:30:02 crc kubenswrapper[4631]: I1204 17:30:02.240041 4631 scope.go:117] "RemoveContainer" containerID="04cf802bdf1fd0d81a685ca8171bfcea89086fdd4979fcc18df7c134d482e052"
Dec 04 17:30:02 crc kubenswrapper[4631]: E1204 17:30:02.240247 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-vpgzg_openshift-ovn-kubernetes(0d617abc-dc04-4807-b684-3640cde38e81)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" podUID="0d617abc-dc04-4807-b684-3640cde38e81"
Dec 04 17:30:03 crc kubenswrapper[4631]: I1204 17:30:03.239241 4631 scope.go:117] "RemoveContainer" containerID="efdd152e4738f125d721a6d044c0d96a378761e28d18f292c85706d0f3158f4f"
Dec 04 17:30:04 crc kubenswrapper[4631]: I1204 17:30:04.469244 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zllp2_03e821a0-13d4-417c-9e54-7073b08490db/kube-multus/1.log"
Dec 04 17:30:04 crc kubenswrapper[4631]: I1204 17:30:04.469302 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zllp2" event={"ID":"03e821a0-13d4-417c-9e54-7073b08490db","Type":"ContainerStarted","Data":"0318e7532312b85217ec8efa77e3954b4f97cddd0c6c323b138b158f7fe38080"}
pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:30:06 crc kubenswrapper[4631]: I1204 17:30:06.238427 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:30:06 crc kubenswrapper[4631]: I1204 17:30:06.238440 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:30:06 crc kubenswrapper[4631]: I1204 17:30:06.238603 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:30:06 crc kubenswrapper[4631]: E1204 17:30:06.238661 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:30:06 crc kubenswrapper[4631]: E1204 17:30:06.238848 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:30:06 crc kubenswrapper[4631]: E1204 17:30:06.239014 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:30:06 crc kubenswrapper[4631]: E1204 17:30:06.650588 4631 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 17:30:08 crc kubenswrapper[4631]: I1204 17:30:08.238640 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:30:08 crc kubenswrapper[4631]: E1204 17:30:08.239293 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:30:08 crc kubenswrapper[4631]: I1204 17:30:08.238678 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:30:08 crc kubenswrapper[4631]: E1204 17:30:08.239365 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:30:08 crc kubenswrapper[4631]: I1204 17:30:08.238703 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:30:08 crc kubenswrapper[4631]: E1204 17:30:08.239467 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:30:08 crc kubenswrapper[4631]: I1204 17:30:08.238649 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:30:08 crc kubenswrapper[4631]: E1204 17:30:08.239512 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:30:10 crc kubenswrapper[4631]: I1204 17:30:10.239203 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:30:10 crc kubenswrapper[4631]: I1204 17:30:10.239286 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:30:10 crc kubenswrapper[4631]: I1204 17:30:10.239455 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:30:10 crc kubenswrapper[4631]: I1204 17:30:10.239460 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:30:10 crc kubenswrapper[4631]: E1204 17:30:10.239520 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:30:10 crc kubenswrapper[4631]: E1204 17:30:10.240613 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:30:10 crc kubenswrapper[4631]: E1204 17:30:10.240694 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:30:10 crc kubenswrapper[4631]: E1204 17:30:10.240850 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:30:11 crc kubenswrapper[4631]: E1204 17:30:11.651269 4631 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 17:30:12 crc kubenswrapper[4631]: I1204 17:30:12.238563 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:30:12 crc kubenswrapper[4631]: I1204 17:30:12.238606 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:30:12 crc kubenswrapper[4631]: I1204 17:30:12.238610 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:30:12 crc kubenswrapper[4631]: E1204 17:30:12.238724 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:30:12 crc kubenswrapper[4631]: I1204 17:30:12.238828 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:30:12 crc kubenswrapper[4631]: E1204 17:30:12.238820 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:30:12 crc kubenswrapper[4631]: E1204 17:30:12.238937 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:30:12 crc kubenswrapper[4631]: E1204 17:30:12.238984 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:30:14 crc kubenswrapper[4631]: I1204 17:30:14.238900 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:30:14 crc kubenswrapper[4631]: I1204 17:30:14.238949 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:30:14 crc kubenswrapper[4631]: E1204 17:30:14.239666 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:30:14 crc kubenswrapper[4631]: I1204 17:30:14.238969 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:30:14 crc kubenswrapper[4631]: I1204 17:30:14.238949 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:30:14 crc kubenswrapper[4631]: E1204 17:30:14.239802 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:30:14 crc kubenswrapper[4631]: E1204 17:30:14.239895 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:30:14 crc kubenswrapper[4631]: E1204 17:30:14.239986 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:30:15 crc kubenswrapper[4631]: I1204 17:30:15.240617 4631 scope.go:117] "RemoveContainer" containerID="04cf802bdf1fd0d81a685ca8171bfcea89086fdd4979fcc18df7c134d482e052" Dec 04 17:30:16 crc kubenswrapper[4631]: I1204 17:30:16.238639 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:30:16 crc kubenswrapper[4631]: I1204 17:30:16.238761 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:30:16 crc kubenswrapper[4631]: E1204 17:30:16.238811 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:30:16 crc kubenswrapper[4631]: I1204 17:30:16.238836 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:30:16 crc kubenswrapper[4631]: I1204 17:30:16.238843 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:30:16 crc kubenswrapper[4631]: E1204 17:30:16.238931 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:30:16 crc kubenswrapper[4631]: E1204 17:30:16.239015 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:30:16 crc kubenswrapper[4631]: E1204 17:30:16.239076 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:30:16 crc kubenswrapper[4631]: E1204 17:30:16.652618 4631 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 17:30:18 crc kubenswrapper[4631]: I1204 17:30:18.238797 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:30:18 crc kubenswrapper[4631]: I1204 17:30:18.238877 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:30:18 crc kubenswrapper[4631]: E1204 17:30:18.239038 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:30:18 crc kubenswrapper[4631]: I1204 17:30:18.239065 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:30:18 crc kubenswrapper[4631]: E1204 17:30:18.239168 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:30:18 crc kubenswrapper[4631]: E1204 17:30:18.239352 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:30:18 crc kubenswrapper[4631]: I1204 17:30:18.239550 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:30:18 crc kubenswrapper[4631]: E1204 17:30:18.239883 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:30:20 crc kubenswrapper[4631]: I1204 17:30:20.238757 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:30:20 crc kubenswrapper[4631]: I1204 17:30:20.238749 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:30:20 crc kubenswrapper[4631]: I1204 17:30:20.238777 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:30:20 crc kubenswrapper[4631]: I1204 17:30:20.238788 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:30:20 crc kubenswrapper[4631]: E1204 17:30:20.240810 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:30:20 crc kubenswrapper[4631]: E1204 17:30:20.240866 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:30:20 crc kubenswrapper[4631]: E1204 17:30:20.240920 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:30:20 crc kubenswrapper[4631]: E1204 17:30:20.240977 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:30:20 crc kubenswrapper[4631]: I1204 17:30:20.532140 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vpgzg_0d617abc-dc04-4807-b684-3640cde38e81/ovnkube-controller/3.log" Dec 04 17:30:20 crc kubenswrapper[4631]: I1204 17:30:20.535412 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerStarted","Data":"7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0"} Dec 04 17:30:20 crc kubenswrapper[4631]: I1204 17:30:20.536594 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:30:21 crc kubenswrapper[4631]: I1204 17:30:21.139234 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" podStartSLOduration=125.139213881 podStartE2EDuration="2m5.139213881s" podCreationTimestamp="2025-12-04 17:28:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:20.594727964 +0000 UTC m=+150.626969962" watchObservedRunningTime="2025-12-04 17:30:21.139213881 +0000 UTC m=+151.171455879" Dec 04 17:30:21 crc kubenswrapper[4631]: I1204 17:30:21.140132 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-8kcrj"] Dec 04 17:30:21 crc kubenswrapper[4631]: I1204 17:30:21.140221 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:30:21 crc kubenswrapper[4631]: E1204 17:30:21.140310 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:30:21 crc kubenswrapper[4631]: E1204 17:30:21.653819 4631 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 04 17:30:22 crc kubenswrapper[4631]: I1204 17:30:22.239221 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:30:22 crc kubenswrapper[4631]: I1204 17:30:22.239267 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:30:22 crc kubenswrapper[4631]: I1204 17:30:22.239222 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:30:22 crc kubenswrapper[4631]: E1204 17:30:22.239424 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:30:22 crc kubenswrapper[4631]: E1204 17:30:22.239541 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:30:22 crc kubenswrapper[4631]: E1204 17:30:22.239639 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:30:23 crc kubenswrapper[4631]: I1204 17:30:23.238316 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:30:23 crc kubenswrapper[4631]: E1204 17:30:23.238461 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:30:24 crc kubenswrapper[4631]: I1204 17:30:24.135291 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:24 crc kubenswrapper[4631]: E1204 17:30:24.135453 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:32:26.13543035 +0000 UTC m=+276.167672348 (durationBeforeRetry 2m2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:24 crc kubenswrapper[4631]: I1204 17:30:24.236567 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:30:24 crc kubenswrapper[4631]: I1204 17:30:24.236650 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:30:24 crc kubenswrapper[4631]: I1204 17:30:24.236683 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:30:24 crc kubenswrapper[4631]: E1204 17:30:24.236828 4631 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 17:30:24 crc kubenswrapper[4631]: E1204 17:30:24.236872 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 17:30:24 crc kubenswrapper[4631]: E1204 17:30:24.236918 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 17:30:24 crc kubenswrapper[4631]: E1204 17:30:24.236935 4631 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:30:24 crc kubenswrapper[4631]: E1204 17:30:24.237034 4631 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 17:30:24 crc kubenswrapper[4631]: I1204 17:30:24.237224 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 
17:30:24 crc kubenswrapper[4631]: E1204 17:30:24.237286 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 17:32:26.237253648 +0000 UTC m=+276.269495646 (durationBeforeRetry 2m2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 04 17:30:24 crc kubenswrapper[4631]: E1204 17:30:24.237317 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-04 17:32:26.237307159 +0000 UTC m=+276.269549217 (durationBeforeRetry 2m2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:30:24 crc kubenswrapper[4631]: E1204 17:30:24.237334 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-04 17:32:26.23732588 +0000 UTC m=+276.269567888 (durationBeforeRetry 2m2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 04 17:30:24 crc kubenswrapper[4631]: E1204 17:30:24.237485 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 04 17:30:24 crc kubenswrapper[4631]: E1204 17:30:24.237526 4631 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 04 17:30:24 crc kubenswrapper[4631]: E1204 17:30:24.237539 4631 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:30:24 crc kubenswrapper[4631]: E1204 17:30:24.237598 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-04 17:32:26.237577247 +0000 UTC m=+276.269819335 (durationBeforeRetry 2m2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 04 17:30:24 crc kubenswrapper[4631]: I1204 17:30:24.238399 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:30:24 crc kubenswrapper[4631]: I1204 17:30:24.238492 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:30:24 crc kubenswrapper[4631]: E1204 17:30:24.238629 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:30:24 crc kubenswrapper[4631]: E1204 17:30:24.238738 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:30:24 crc kubenswrapper[4631]: I1204 17:30:24.238789 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:30:24 crc kubenswrapper[4631]: E1204 17:30:24.238849 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:30:25 crc kubenswrapper[4631]: I1204 17:30:25.238647 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:30:25 crc kubenswrapper[4631]: E1204 17:30:25.238834 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8kcrj" podUID="86903bd1-674d-4fa2-b9d1-dbc8f347b72b" Dec 04 17:30:26 crc kubenswrapper[4631]: I1204 17:30:26.239087 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:30:26 crc kubenswrapper[4631]: I1204 17:30:26.239134 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:30:26 crc kubenswrapper[4631]: I1204 17:30:26.239170 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:30:26 crc kubenswrapper[4631]: E1204 17:30:26.239235 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 04 17:30:26 crc kubenswrapper[4631]: E1204 17:30:26.239294 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 04 17:30:26 crc kubenswrapper[4631]: E1204 17:30:26.239437 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 04 17:30:27 crc kubenswrapper[4631]: I1204 17:30:27.238534 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj" Dec 04 17:30:27 crc kubenswrapper[4631]: I1204 17:30:27.241694 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 04 17:30:27 crc kubenswrapper[4631]: I1204 17:30:27.241832 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 04 17:30:28 crc kubenswrapper[4631]: I1204 17:30:28.238847 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:30:28 crc kubenswrapper[4631]: I1204 17:30:28.238896 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:30:28 crc kubenswrapper[4631]: I1204 17:30:28.238935 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:30:28 crc kubenswrapper[4631]: I1204 17:30:28.242266 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 04 17:30:28 crc kubenswrapper[4631]: I1204 17:30:28.242479 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 04 17:30:28 crc kubenswrapper[4631]: I1204 17:30:28.242624 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 04 17:30:28 crc kubenswrapper[4631]: I1204 17:30:28.242954 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.023202 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.023266 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.408101 4631 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.454354 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n"] Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.455189 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.455851 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-pdgsq"] Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.456699 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-pdgsq" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.467523 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.467559 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.467510 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.467903 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.468027 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.467950 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.471924 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.472180 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.472254 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.473726 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/9a38e196-88e0-4add-8e52-40b1d8eb79e9-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-pdgsq\" (UID: \"9a38e196-88e0-4add-8e52-40b1d8eb79e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdgsq" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.473892 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9a38e196-88e0-4add-8e52-40b1d8eb79e9-images\") pod \"machine-api-operator-5694c8668f-pdgsq\" (UID: \"9a38e196-88e0-4add-8e52-40b1d8eb79e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdgsq" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.474008 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzhcm\" (UniqueName: \"kubernetes.io/projected/9a38e196-88e0-4add-8e52-40b1d8eb79e9-kube-api-access-zzhcm\") pod \"machine-api-operator-5694c8668f-pdgsq\" (UID: \"9a38e196-88e0-4add-8e52-40b1d8eb79e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdgsq" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.473760 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-hc4kn"] Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.474133 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a38e196-88e0-4add-8e52-40b1d8eb79e9-config\") pod 
\"machine-api-operator-5694c8668f-pdgsq\" (UID: \"9a38e196-88e0-4add-8e52-40b1d8eb79e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdgsq" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.474455 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/dbc61eb8-0a7c-4ea2-8f50-5a2522daa465-available-featuregates\") pod \"openshift-config-operator-7777fb866f-bvw8n\" (UID: \"dbc61eb8-0a7c-4ea2-8f50-5a2522daa465\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.474603 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dbc61eb8-0a7c-4ea2-8f50-5a2522daa465-serving-cert\") pod \"openshift-config-operator-7777fb866f-bvw8n\" (UID: \"dbc61eb8-0a7c-4ea2-8f50-5a2522daa465\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.474702 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4rxv\" (UniqueName: \"kubernetes.io/projected/dbc61eb8-0a7c-4ea2-8f50-5a2522daa465-kube-api-access-l4rxv\") pod \"openshift-config-operator-7777fb866f-bvw8n\" (UID: \"dbc61eb8-0a7c-4ea2-8f50-5a2522daa465\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.475023 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.476254 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5vvr2"] Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.482685 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.483535 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.494144 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.494849 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.495384 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.495576 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.495839 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.496227 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.496451 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.496642 
4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.496698 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.497128 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-kl45g"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.498944 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.499626 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-hgwwp"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.500007 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-vgm6t"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.500208 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.499247 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-kl45g"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.501097 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-hgwwp"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.501943 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-h4gb7"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.502405 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9qbmf"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.498629 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.502807 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-glqcf"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.503313 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-glqcf"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.502160 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.503756 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9qbmf"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.503815 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-h4gb7"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.510456 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-z2968"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.511206 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-z2968"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.511528 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.511957 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.513609 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-22m6s"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.515224 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-22m6s"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.518415 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m8hln"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.518972 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rsqp8"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.519425 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rsqp8"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.520329 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.526701 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w2p6c"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.527289 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w2p6c"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.529907 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-br8kn"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.530390 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-ctqxg"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.530713 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cpw5s"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.531111 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cpw5s"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.531538 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-br8kn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.532017 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.534520 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-g65qn"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.535169 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-g65qn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.544573 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-swgqn"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.545361 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-swgqn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.548074 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.548502 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.549817 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.550129 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.550390 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.551059 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.551342 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.551573 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.551790 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.551993 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.552321 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.552492 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.552760 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.553493 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.560398 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.560764 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.561409 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.561551 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.562195 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.562405 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.562513 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.562700 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.562437 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.562905 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-68slc"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.563044 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.563736 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-96c5q"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.563992 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fgbnd"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.564272 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kkczr"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.575591 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.580421 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.580933 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.581150 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.583539 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.595517 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-qxmc5"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.595602 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kkczr"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.599010 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.596786 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-68slc"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.608596 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.596812 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-96c5q"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.596751 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fgbnd"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.632606 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.633166 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.633341 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.633513 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.632733 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.633179 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634073 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-console-config\") pod \"console-f9d7485db-kl45g\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") " pod="openshift-console/console-f9d7485db-kl45g"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634096 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-audit-dir\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634117 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gg7s\" (UniqueName: \"kubernetes.io/projected/f645aac4-2afb-45b9-8f28-dca8259c7278-kube-api-access-4gg7s\") pod \"console-operator-58897d9998-hgwwp\" (UID: \"f645aac4-2afb-45b9-8f28-dca8259c7278\") " pod="openshift-console-operator/console-operator-58897d9998-hgwwp"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634162 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9a38e196-88e0-4add-8e52-40b1d8eb79e9-images\") pod \"machine-api-operator-5694c8668f-pdgsq\" (UID: \"9a38e196-88e0-4add-8e52-40b1d8eb79e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdgsq"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634184 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/81559ff3-95e9-455f-9d90-46c5f1a981ce-console-serving-cert\") pod \"console-f9d7485db-kl45g\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") " pod="openshift-console/console-f9d7485db-kl45g"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634205 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e59edbf8-41cb-4d41-b012-a5e2dcf83df4-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-br8kn\" (UID: \"e59edbf8-41cb-4d41-b012-a5e2dcf83df4\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-br8kn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634223 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e59edbf8-41cb-4d41-b012-a5e2dcf83df4-config\") pod \"kube-controller-manager-operator-78b949d7b-br8kn\" (UID: \"e59edbf8-41cb-4d41-b012-a5e2dcf83df4\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-br8kn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634246 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ff4cb9cc-d164-4771-8e5f-41acc28c25e6-bound-sa-token\") pod \"ingress-operator-5b745b69d9-22m6s\" (UID: \"ff4cb9cc-d164-4771-8e5f-41acc28c25e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-22m6s"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634267 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/808ad22e-4629-4a7d-8613-b35fedecc2ed-serving-cert\") pod \"etcd-operator-b45778765-vgm6t\" (UID: \"808ad22e-4629-4a7d-8613-b35fedecc2ed\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634289 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbdv8\" (UniqueName: \"kubernetes.io/projected/81559ff3-95e9-455f-9d90-46c5f1a981ce-kube-api-access-wbdv8\") pod \"console-f9d7485db-kl45g\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") " pod="openshift-console/console-f9d7485db-kl45g"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634308 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85d00b11-ed99-44ac-81b8-73d958bc4d3e-serving-cert\") pod \"controller-manager-879f6c89f-5vvr2\" (UID: \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634332 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5m6j\" (UniqueName: \"kubernetes.io/projected/832113b6-ef23-4863-8080-cea0494584e7-kube-api-access-q5m6j\") pod \"downloads-7954f5f757-g65qn\" (UID: \"832113b6-ef23-4863-8080-cea0494584e7\") " pod="openshift-console/downloads-7954f5f757-g65qn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634354 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b65mk\" (UniqueName: \"kubernetes.io/projected/f6f09a1b-f9c2-43ec-8222-1fa25a379095-kube-api-access-b65mk\") pod \"route-controller-manager-6576b87f9c-hdlxr\" (UID: \"f6f09a1b-f9c2-43ec-8222-1fa25a379095\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634403 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-audit-policies\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634427 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634448 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f645aac4-2afb-45b9-8f28-dca8259c7278-serving-cert\") pod \"console-operator-58897d9998-hgwwp\" (UID: \"f645aac4-2afb-45b9-8f28-dca8259c7278\") " pod="openshift-console-operator/console-operator-58897d9998-hgwwp"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634483 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a38e196-88e0-4add-8e52-40b1d8eb79e9-config\") pod \"machine-api-operator-5694c8668f-pdgsq\" (UID: \"9a38e196-88e0-4add-8e52-40b1d8eb79e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdgsq"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634505 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ff4cb9cc-d164-4771-8e5f-41acc28c25e6-metrics-tls\") pod \"ingress-operator-5b745b69d9-22m6s\" (UID: \"ff4cb9cc-d164-4771-8e5f-41acc28c25e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-22m6s"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634529 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/dbc61eb8-0a7c-4ea2-8f50-5a2522daa465-available-featuregates\") pod \"openshift-config-operator-7777fb866f-bvw8n\" (UID: \"dbc61eb8-0a7c-4ea2-8f50-5a2522daa465\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634552 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c436effe-de7b-4e3b-a61a-0ff4a7067363-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-rsqp8\" (UID: \"c436effe-de7b-4e3b-a61a-0ff4a7067363\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rsqp8"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634574 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634594 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-trusted-ca-bundle\") pod \"console-f9d7485db-kl45g\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") " pod="openshift-console/console-f9d7485db-kl45g"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634617 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42w5f\" (UniqueName: \"kubernetes.io/projected/c436effe-de7b-4e3b-a61a-0ff4a7067363-kube-api-access-42w5f\") pod \"openshift-controller-manager-operator-756b6f6bc6-rsqp8\" (UID: \"c436effe-de7b-4e3b-a61a-0ff4a7067363\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rsqp8"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634644 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dbc61eb8-0a7c-4ea2-8f50-5a2522daa465-serving-cert\") pod \"openshift-config-operator-7777fb866f-bvw8n\" (UID: \"dbc61eb8-0a7c-4ea2-8f50-5a2522daa465\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634668 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/30620276-02bb-4cea-a50e-36fc7d4689ae-trusted-ca-bundle\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634688 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njcf7\" (UniqueName: \"kubernetes.io/projected/85d00b11-ed99-44ac-81b8-73d958bc4d3e-kube-api-access-njcf7\") pod \"controller-manager-879f6c89f-5vvr2\" (UID: \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634709 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634733 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/808ad22e-4629-4a7d-8613-b35fedecc2ed-config\") pod \"etcd-operator-b45778765-vgm6t\" (UID: \"808ad22e-4629-4a7d-8613-b35fedecc2ed\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634755 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4rxv\" (UniqueName: \"kubernetes.io/projected/dbc61eb8-0a7c-4ea2-8f50-5a2522daa465-kube-api-access-l4rxv\") pod \"openshift-config-operator-7777fb866f-bvw8n\" (UID: \"dbc61eb8-0a7c-4ea2-8f50-5a2522daa465\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634786 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/30620276-02bb-4cea-a50e-36fc7d4689ae-node-pullsecrets\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634808 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634828 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfqzx\" (UniqueName: \"kubernetes.io/projected/ff4cb9cc-d164-4771-8e5f-41acc28c25e6-kube-api-access-cfqzx\") pod \"ingress-operator-5b745b69d9-22m6s\" (UID: \"ff4cb9cc-d164-4771-8e5f-41acc28c25e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-22m6s"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634851 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/85d00b11-ed99-44ac-81b8-73d958bc4d3e-client-ca\") pod \"controller-manager-879f6c89f-5vvr2\" (UID: \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634872 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634898 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/9a38e196-88e0-4add-8e52-40b1d8eb79e9-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-pdgsq\" (UID: \"9a38e196-88e0-4add-8e52-40b1d8eb79e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdgsq"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634921 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/30620276-02bb-4cea-a50e-36fc7d4689ae-etcd-client\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634943 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f6f09a1b-f9c2-43ec-8222-1fa25a379095-client-ca\") pod \"route-controller-manager-6576b87f9c-hdlxr\" (UID: \"f6f09a1b-f9c2-43ec-8222-1fa25a379095\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634964 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbt6f\" (UniqueName: \"kubernetes.io/projected/4ef93ccc-0327-4a63-8f44-4cb3f9e1ba83-kube-api-access-jbt6f\") pod \"cluster-samples-operator-665b6dd947-9qbmf\" (UID: \"4ef93ccc-0327-4a63-8f44-4cb3f9e1ba83\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9qbmf"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.634986 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2t6n\" (UniqueName: \"kubernetes.io/projected/30620276-02bb-4cea-a50e-36fc7d4689ae-kube-api-access-q2t6n\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635007 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/808ad22e-4629-4a7d-8613-b35fedecc2ed-etcd-ca\") pod \"etcd-operator-b45778765-vgm6t\" (UID: \"808ad22e-4629-4a7d-8613-b35fedecc2ed\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635031 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635091 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/db00623b-8c6a-45d1-ab8b-a7e4f81f64eb-metrics-certs\") pod \"router-default-5444994796-glqcf\" (UID: \"db00623b-8c6a-45d1-ab8b-a7e4f81f64eb\") " pod="openshift-ingress/router-default-5444994796-glqcf"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635115 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635138 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/30620276-02bb-4cea-a50e-36fc7d4689ae-etcd-serving-ca\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635159 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-oauth-serving-cert\") pod \"console-f9d7485db-kl45g\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") " pod="openshift-console/console-f9d7485db-kl45g"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635179 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/85d00b11-ed99-44ac-81b8-73d958bc4d3e-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-5vvr2\" (UID: \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635200 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/db00623b-8c6a-45d1-ab8b-a7e4f81f64eb-stats-auth\") pod \"router-default-5444994796-glqcf\" (UID: \"db00623b-8c6a-45d1-ab8b-a7e4f81f64eb\") " pod="openshift-ingress/router-default-5444994796-glqcf"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635223 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvlbb\" (UniqueName: \"kubernetes.io/projected/808ad22e-4629-4a7d-8613-b35fedecc2ed-kube-api-access-gvlbb\") pod \"etcd-operator-b45778765-vgm6t\" (UID: \"808ad22e-4629-4a7d-8613-b35fedecc2ed\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635251 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/30620276-02bb-4cea-a50e-36fc7d4689ae-encryption-config\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635272 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ff4cb9cc-d164-4771-8e5f-41acc28c25e6-trusted-ca\") pod \"ingress-operator-5b745b69d9-22m6s\" (UID: \"ff4cb9cc-d164-4771-8e5f-41acc28c25e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-22m6s"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635292 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85d00b11-ed99-44ac-81b8-73d958bc4d3e-config\") pod \"controller-manager-879f6c89f-5vvr2\" (UID: \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635313 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635415 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzhcm\" (UniqueName: \"kubernetes.io/projected/9a38e196-88e0-4add-8e52-40b1d8eb79e9-kube-api-access-zzhcm\") pod \"machine-api-operator-5694c8668f-pdgsq\" (UID: \"9a38e196-88e0-4add-8e52-40b1d8eb79e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdgsq"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635441 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/30620276-02bb-4cea-a50e-36fc7d4689ae-serving-cert\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635462 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6f09a1b-f9c2-43ec-8222-1fa25a379095-config\") pod \"route-controller-manager-6576b87f9c-hdlxr\" (UID: \"f6f09a1b-f9c2-43ec-8222-1fa25a379095\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635484 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6f09a1b-f9c2-43ec-8222-1fa25a379095-serving-cert\") pod \"route-controller-manager-6576b87f9c-hdlxr\" (UID: \"f6f09a1b-f9c2-43ec-8222-1fa25a379095\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635507 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/30620276-02bb-4cea-a50e-36fc7d4689ae-image-import-ca\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635534 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/30620276-02bb-4cea-a50e-36fc7d4689ae-audit\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635589 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klrg9\" (UniqueName: \"kubernetes.io/projected/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-kube-api-access-klrg9\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635629 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f645aac4-2afb-45b9-8f28-dca8259c7278-trusted-ca\") pod \"console-operator-58897d9998-hgwwp\" (UID: \"f645aac4-2afb-45b9-8f28-dca8259c7278\") " pod="openshift-console-operator/console-operator-58897d9998-hgwwp"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635651 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-service-ca\") pod \"console-f9d7485db-kl45g\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") " pod="openshift-console/console-f9d7485db-kl45g"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635675 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30620276-02bb-4cea-a50e-36fc7d4689ae-config\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635699 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4npp2\" (UniqueName: \"kubernetes.io/projected/db00623b-8c6a-45d1-ab8b-a7e4f81f64eb-kube-api-access-4npp2\") pod \"router-default-5444994796-glqcf\" (UID: \"db00623b-8c6a-45d1-ab8b-a7e4f81f64eb\") " pod="openshift-ingress/router-default-5444994796-glqcf"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635722 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/808ad22e-4629-4a7d-8613-b35fedecc2ed-etcd-service-ca\") pod \"etcd-operator-b45778765-vgm6t\" (UID: \"808ad22e-4629-4a7d-8613-b35fedecc2ed\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635760 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e59edbf8-41cb-4d41-b012-a5e2dcf83df4-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-br8kn\" (UID: \"e59edbf8-41cb-4d41-b012-a5e2dcf83df4\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-br8kn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635781 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/db00623b-8c6a-45d1-ab8b-a7e4f81f64eb-service-ca-bundle\") pod \"router-default-5444994796-glqcf\" (UID: \"db00623b-8c6a-45d1-ab8b-a7e4f81f64eb\") " pod="openshift-ingress/router-default-5444994796-glqcf"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635803 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635826 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/db00623b-8c6a-45d1-ab8b-a7e4f81f64eb-default-certificate\") pod \"router-default-5444994796-glqcf\" (UID: \"db00623b-8c6a-45d1-ab8b-a7e4f81f64eb\") " pod="openshift-ingress/router-default-5444994796-glqcf"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635848 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635871 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f645aac4-2afb-45b9-8f28-dca8259c7278-config\") pod \"console-operator-58897d9998-hgwwp\" (UID: \"f645aac4-2afb-45b9-8f28-dca8259c7278\") " pod="openshift-console-operator/console-operator-58897d9998-hgwwp"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635894 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/81559ff3-95e9-455f-9d90-46c5f1a981ce-console-oauth-config\") pod \"console-f9d7485db-kl45g\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") " pod="openshift-console/console-f9d7485db-kl45g"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635915 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c436effe-de7b-4e3b-a61a-0ff4a7067363-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-rsqp8\" (UID: \"c436effe-de7b-4e3b-a61a-0ff4a7067363\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rsqp8"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635953 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/30620276-02bb-4cea-a50e-36fc7d4689ae-audit-dir\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.635977 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/808ad22e-4629-4a7d-8613-b35fedecc2ed-etcd-client\") pod \"etcd-operator-b45778765-vgm6t\" (UID: \"808ad22e-4629-4a7d-8613-b35fedecc2ed\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.636002 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/4ef93ccc-0327-4a63-8f44-4cb3f9e1ba83-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-9qbmf\" (UID: \"4ef93ccc-0327-4a63-8f44-4cb3f9e1ba83\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9qbmf"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.637954 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.638342 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.641360 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9a38e196-88e0-4add-8e52-40b1d8eb79e9-images\") pod \"machine-api-operator-5694c8668f-pdgsq\" (UID: \"9a38e196-88e0-4add-8e52-40b1d8eb79e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdgsq"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.609089 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.642722 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a38e196-88e0-4add-8e52-40b1d8eb79e9-config\") pod \"machine-api-operator-5694c8668f-pdgsq\" (UID: \"9a38e196-88e0-4add-8e52-40b1d8eb79e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdgsq"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.643309 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/dbc61eb8-0a7c-4ea2-8f50-5a2522daa465-available-featuregates\") pod \"openshift-config-operator-7777fb866f-bvw8n\" (UID: \"dbc61eb8-0a7c-4ea2-8f50-5a2522daa465\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.645180 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.645398 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.645484 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.645557 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.645643 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.645716 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.645787 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.645872 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.645949 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.646753 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.650512 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.650634 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.651561 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6lshd"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.651651 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.652068 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-gswpr"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.652565 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-gswpr"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.652829 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qxmc5"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.653026 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6lshd"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.657332 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.657384 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.660438 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.660488 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.660568 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.660742 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.660815 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.660900 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.660979 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.661048 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.661121 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.661192 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.661259 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.661328 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.661437 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.661527 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.661652 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.661741 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.661965 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.662661 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.663286 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.663758 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.663844 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.663932 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.664074 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.664220 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.660748 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.667139 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.667287 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.670197 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.670430 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.670549 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-4wt5l"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.670961 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.671267 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4wt5l"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.671909 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-pqqkz"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.672280 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-pqqkz"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.672718 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dbc61eb8-0a7c-4ea2-8f50-5a2522daa465-serving-cert\") pod \"openshift-config-operator-7777fb866f-bvw8n\" (UID: \"dbc61eb8-0a7c-4ea2-8f50-5a2522daa465\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.673043 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-pdgsq"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.678586 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.678616 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.679426 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vtdlj"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.679901 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.682025 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/9a38e196-88e0-4add-8e52-40b1d8eb79e9-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-pdgsq\" (UID: \"9a38e196-88e0-4add-8e52-40b1d8eb79e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdgsq"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.682563 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.684453 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.697595 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vtdlj"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.700899 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.701471 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-sgr7v"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.701859 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhq8j"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.702192 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhq8j"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.702652 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.702785 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sgr7v"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.705707 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d97wj"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.706017 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-g4684"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.706288 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5vvr2"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.706337 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-g4684"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.724973 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.726000 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-d97wj"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.732111 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j54mq"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.735193 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j54mq"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.735912 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.738013 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/30620276-02bb-4cea-a50e-36fc7d4689ae-node-pullsecrets\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.738046 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.738074 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfqzx\" (UniqueName: \"kubernetes.io/projected/ff4cb9cc-d164-4771-8e5f-41acc28c25e6-kube-api-access-cfqzx\") pod \"ingress-operator-5b745b69d9-22m6s\" (UID: \"ff4cb9cc-d164-4771-8e5f-41acc28c25e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-22m6s"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.738153 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/85d00b11-ed99-44ac-81b8-73d958bc4d3e-client-ca\") pod \"controller-manager-879f6c89f-5vvr2\" (UID: \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.738175 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.738193 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/30620276-02bb-4cea-a50e-36fc7d4689ae-etcd-client\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.738211 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f6f09a1b-f9c2-43ec-8222-1fa25a379095-client-ca\") pod \"route-controller-manager-6576b87f9c-hdlxr\" (UID: \"f6f09a1b-f9c2-43ec-8222-1fa25a379095\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.738232 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbt6f\" (UniqueName: \"kubernetes.io/projected/4ef93ccc-0327-4a63-8f44-4cb3f9e1ba83-kube-api-access-jbt6f\") pod \"cluster-samples-operator-665b6dd947-9qbmf\" (UID: \"4ef93ccc-0327-4a63-8f44-4cb3f9e1ba83\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9qbmf"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.738317 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2t6n\" (UniqueName: \"kubernetes.io/projected/30620276-02bb-4cea-a50e-36fc7d4689ae-kube-api-access-q2t6n\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.738337 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/808ad22e-4629-4a7d-8613-b35fedecc2ed-etcd-ca\") pod \"etcd-operator-b45778765-vgm6t\" (UID: \"808ad22e-4629-4a7d-8613-b35fedecc2ed\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.738358 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.738502 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/db00623b-8c6a-45d1-ab8b-a7e4f81f64eb-metrics-certs\") pod \"router-default-5444994796-glqcf\" (UID: \"db00623b-8c6a-45d1-ab8b-a7e4f81f64eb\") " pod="openshift-ingress/router-default-5444994796-glqcf"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.738550 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.738568 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/30620276-02bb-4cea-a50e-36fc7d4689ae-etcd-serving-ca\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.738770 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-oauth-serving-cert\") pod \"console-f9d7485db-kl45g\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") " pod="openshift-console/console-f9d7485db-kl45g"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.738795 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/85d00b11-ed99-44ac-81b8-73d958bc4d3e-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-5vvr2\" (UID: \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204
17:30:36.738817 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/db00623b-8c6a-45d1-ab8b-a7e4f81f64eb-stats-auth\") pod \"router-default-5444994796-glqcf\" (UID: \"db00623b-8c6a-45d1-ab8b-a7e4f81f64eb\") " pod="openshift-ingress/router-default-5444994796-glqcf" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.738840 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvlbb\" (UniqueName: \"kubernetes.io/projected/808ad22e-4629-4a7d-8613-b35fedecc2ed-kube-api-access-gvlbb\") pod \"etcd-operator-b45778765-vgm6t\" (UID: \"808ad22e-4629-4a7d-8613-b35fedecc2ed\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.738859 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/30620276-02bb-4cea-a50e-36fc7d4689ae-encryption-config\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.738878 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ff4cb9cc-d164-4771-8e5f-41acc28c25e6-trusted-ca\") pod \"ingress-operator-5b745b69d9-22m6s\" (UID: \"ff4cb9cc-d164-4771-8e5f-41acc28c25e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-22m6s" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.738897 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85d00b11-ed99-44ac-81b8-73d958bc4d3e-config\") pod \"controller-manager-879f6c89f-5vvr2\" (UID: \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.738921 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.738943 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6f09a1b-f9c2-43ec-8222-1fa25a379095-config\") pod \"route-controller-manager-6576b87f9c-hdlxr\" (UID: \"f6f09a1b-f9c2-43ec-8222-1fa25a379095\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.739138 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/30620276-02bb-4cea-a50e-36fc7d4689ae-serving-cert\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.739164 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6f09a1b-f9c2-43ec-8222-1fa25a379095-serving-cert\") pod \"route-controller-manager-6576b87f9c-hdlxr\" (UID: 
\"f6f09a1b-f9c2-43ec-8222-1fa25a379095\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.739194 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/30620276-02bb-4cea-a50e-36fc7d4689ae-image-import-ca\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.739220 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/30620276-02bb-4cea-a50e-36fc7d4689ae-audit\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.739243 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-service-ca\") pod \"console-f9d7485db-kl45g\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") " pod="openshift-console/console-f9d7485db-kl45g" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.739433 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klrg9\" (UniqueName: \"kubernetes.io/projected/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-kube-api-access-klrg9\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.739459 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f645aac4-2afb-45b9-8f28-dca8259c7278-trusted-ca\") pod \"console-operator-58897d9998-hgwwp\" (UID: \"f645aac4-2afb-45b9-8f28-dca8259c7278\") " pod="openshift-console-operator/console-operator-58897d9998-hgwwp" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.739489 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30620276-02bb-4cea-a50e-36fc7d4689ae-config\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.739518 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4npp2\" (UniqueName: \"kubernetes.io/projected/db00623b-8c6a-45d1-ab8b-a7e4f81f64eb-kube-api-access-4npp2\") pod \"router-default-5444994796-glqcf\" (UID: \"db00623b-8c6a-45d1-ab8b-a7e4f81f64eb\") " pod="openshift-ingress/router-default-5444994796-glqcf" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.739644 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/808ad22e-4629-4a7d-8613-b35fedecc2ed-etcd-service-ca\") pod \"etcd-operator-b45778765-vgm6t\" (UID: \"808ad22e-4629-4a7d-8613-b35fedecc2ed\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.739675 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/e59edbf8-41cb-4d41-b012-a5e2dcf83df4-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-br8kn\" (UID: \"e59edbf8-41cb-4d41-b012-a5e2dcf83df4\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-br8kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.739693 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/db00623b-8c6a-45d1-ab8b-a7e4f81f64eb-service-ca-bundle\") pod \"router-default-5444994796-glqcf\" (UID: \"db00623b-8c6a-45d1-ab8b-a7e4f81f64eb\") " pod="openshift-ingress/router-default-5444994796-glqcf" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740098 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740123 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/db00623b-8c6a-45d1-ab8b-a7e4f81f64eb-default-certificate\") pod \"router-default-5444994796-glqcf\" (UID: \"db00623b-8c6a-45d1-ab8b-a7e4f81f64eb\") " pod="openshift-ingress/router-default-5444994796-glqcf" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740145 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740168 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f645aac4-2afb-45b9-8f28-dca8259c7278-config\") pod \"console-operator-58897d9998-hgwwp\" (UID: \"f645aac4-2afb-45b9-8f28-dca8259c7278\") " pod="openshift-console-operator/console-operator-58897d9998-hgwwp" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740187 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/30620276-02bb-4cea-a50e-36fc7d4689ae-audit-dir\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740210 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/81559ff3-95e9-455f-9d90-46c5f1a981ce-console-oauth-config\") pod \"console-f9d7485db-kl45g\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") " pod="openshift-console/console-f9d7485db-kl45g" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740229 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c436effe-de7b-4e3b-a61a-0ff4a7067363-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-rsqp8\" (UID: \"c436effe-de7b-4e3b-a61a-0ff4a7067363\") " 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rsqp8" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740247 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/808ad22e-4629-4a7d-8613-b35fedecc2ed-etcd-client\") pod \"etcd-operator-b45778765-vgm6t\" (UID: \"808ad22e-4629-4a7d-8613-b35fedecc2ed\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740270 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/4ef93ccc-0327-4a63-8f44-4cb3f9e1ba83-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-9qbmf\" (UID: \"4ef93ccc-0327-4a63-8f44-4cb3f9e1ba83\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9qbmf" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740292 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740315 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-console-config\") pod \"console-f9d7485db-kl45g\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") " pod="openshift-console/console-f9d7485db-kl45g" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740332 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-audit-dir\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740350 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gg7s\" (UniqueName: \"kubernetes.io/projected/f645aac4-2afb-45b9-8f28-dca8259c7278-kube-api-access-4gg7s\") pod \"console-operator-58897d9998-hgwwp\" (UID: \"f645aac4-2afb-45b9-8f28-dca8259c7278\") " pod="openshift-console-operator/console-operator-58897d9998-hgwwp" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740384 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/81559ff3-95e9-455f-9d90-46c5f1a981ce-console-serving-cert\") pod \"console-f9d7485db-kl45g\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") " pod="openshift-console/console-f9d7485db-kl45g" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740404 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e59edbf8-41cb-4d41-b012-a5e2dcf83df4-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-br8kn\" (UID: \"e59edbf8-41cb-4d41-b012-a5e2dcf83df4\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-br8kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740442 4631 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e59edbf8-41cb-4d41-b012-a5e2dcf83df4-config\") pod \"kube-controller-manager-operator-78b949d7b-br8kn\" (UID: \"e59edbf8-41cb-4d41-b012-a5e2dcf83df4\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-br8kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740462 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ff4cb9cc-d164-4771-8e5f-41acc28c25e6-bound-sa-token\") pod \"ingress-operator-5b745b69d9-22m6s\" (UID: \"ff4cb9cc-d164-4771-8e5f-41acc28c25e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-22m6s" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740479 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/808ad22e-4629-4a7d-8613-b35fedecc2ed-serving-cert\") pod \"etcd-operator-b45778765-vgm6t\" (UID: \"808ad22e-4629-4a7d-8613-b35fedecc2ed\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740495 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbdv8\" (UniqueName: \"kubernetes.io/projected/81559ff3-95e9-455f-9d90-46c5f1a981ce-kube-api-access-wbdv8\") pod \"console-f9d7485db-kl45g\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") " pod="openshift-console/console-f9d7485db-kl45g" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740513 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85d00b11-ed99-44ac-81b8-73d958bc4d3e-serving-cert\") pod \"controller-manager-879f6c89f-5vvr2\" (UID: \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740573 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5m6j\" (UniqueName: \"kubernetes.io/projected/832113b6-ef23-4863-8080-cea0494584e7-kube-api-access-q5m6j\") pod \"downloads-7954f5f757-g65qn\" (UID: \"832113b6-ef23-4863-8080-cea0494584e7\") " pod="openshift-console/downloads-7954f5f757-g65qn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740595 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b65mk\" (UniqueName: \"kubernetes.io/projected/f6f09a1b-f9c2-43ec-8222-1fa25a379095-kube-api-access-b65mk\") pod \"route-controller-manager-6576b87f9c-hdlxr\" (UID: \"f6f09a1b-f9c2-43ec-8222-1fa25a379095\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740613 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740630 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-audit-policies\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740778 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f645aac4-2afb-45b9-8f28-dca8259c7278-serving-cert\") pod \"console-operator-58897d9998-hgwwp\" (UID: \"f645aac4-2afb-45b9-8f28-dca8259c7278\") " pod="openshift-console-operator/console-operator-58897d9998-hgwwp" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740803 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ff4cb9cc-d164-4771-8e5f-41acc28c25e6-metrics-tls\") pod \"ingress-operator-5b745b69d9-22m6s\" (UID: \"ff4cb9cc-d164-4771-8e5f-41acc28c25e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-22m6s" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740821 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c436effe-de7b-4e3b-a61a-0ff4a7067363-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-rsqp8\" (UID: \"c436effe-de7b-4e3b-a61a-0ff4a7067363\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rsqp8" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740839 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.740982 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-trusted-ca-bundle\") pod \"console-f9d7485db-kl45g\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") " pod="openshift-console/console-f9d7485db-kl45g" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.741003 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42w5f\" (UniqueName: \"kubernetes.io/projected/c436effe-de7b-4e3b-a61a-0ff4a7067363-kube-api-access-42w5f\") pod \"openshift-controller-manager-operator-756b6f6bc6-rsqp8\" (UID: \"c436effe-de7b-4e3b-a61a-0ff4a7067363\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rsqp8" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.741031 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/30620276-02bb-4cea-a50e-36fc7d4689ae-trusted-ca-bundle\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.741051 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njcf7\" (UniqueName: \"kubernetes.io/projected/85d00b11-ed99-44ac-81b8-73d958bc4d3e-kube-api-access-njcf7\") pod \"controller-manager-879f6c89f-5vvr2\" (UID: 
\"85d00b11-ed99-44ac-81b8-73d958bc4d3e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.741068 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.741236 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/808ad22e-4629-4a7d-8613-b35fedecc2ed-config\") pod \"etcd-operator-b45778765-vgm6t\" (UID: \"808ad22e-4629-4a7d-8613-b35fedecc2ed\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.741582 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-audit-dir\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.742494 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-console-config\") pod \"console-f9d7485db-kl45g\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") " pod="openshift-console/console-f9d7485db-kl45g" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.753325 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f645aac4-2afb-45b9-8f28-dca8259c7278-serving-cert\") pod \"console-operator-58897d9998-hgwwp\" (UID: \"f645aac4-2afb-45b9-8f28-dca8259c7278\") " pod="openshift-console-operator/console-operator-58897d9998-hgwwp" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.755875 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e59edbf8-41cb-4d41-b012-a5e2dcf83df4-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-br8kn\" (UID: \"e59edbf8-41cb-4d41-b012-a5e2dcf83df4\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-br8kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.758849 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/30620276-02bb-4cea-a50e-36fc7d4689ae-node-pullsecrets\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.760890 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/30620276-02bb-4cea-a50e-36fc7d4689ae-audit\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.761251 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/f6f09a1b-f9c2-43ec-8222-1fa25a379095-client-ca\") pod \"route-controller-manager-6576b87f9c-hdlxr\" (UID: \"f6f09a1b-f9c2-43ec-8222-1fa25a379095\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.762178 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30620276-02bb-4cea-a50e-36fc7d4689ae-config\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.763787 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/808ad22e-4629-4a7d-8613-b35fedecc2ed-etcd-service-ca\") pod \"etcd-operator-b45778765-vgm6t\" (UID: \"808ad22e-4629-4a7d-8613-b35fedecc2ed\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.764750 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/db00623b-8c6a-45d1-ab8b-a7e4f81f64eb-service-ca-bundle\") pod \"router-default-5444994796-glqcf\" (UID: \"db00623b-8c6a-45d1-ab8b-a7e4f81f64eb\") " pod="openshift-ingress/router-default-5444994796-glqcf" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.765580 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/30620276-02bb-4cea-a50e-36fc7d4689ae-audit-dir\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.766046 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/85d00b11-ed99-44ac-81b8-73d958bc4d3e-client-ca\") pod \"controller-manager-879f6c89f-5vvr2\" (UID: \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.766616 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-service-ca\") pod \"console-f9d7485db-kl45g\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") " pod="openshift-console/console-f9d7485db-kl45g" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.768775 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f645aac4-2afb-45b9-8f28-dca8259c7278-trusted-ca\") pod \"console-operator-58897d9998-hgwwp\" (UID: \"f645aac4-2afb-45b9-8f28-dca8259c7278\") " pod="openshift-console-operator/console-operator-58897d9998-hgwwp" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.769788 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c436effe-de7b-4e3b-a61a-0ff4a7067363-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-rsqp8\" (UID: \"c436effe-de7b-4e3b-a61a-0ff4a7067363\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rsqp8" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.769877 4631 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-trusted-ca-bundle\") pod \"console-f9d7485db-kl45g\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") " pod="openshift-console/console-f9d7485db-kl45g" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.770679 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/30620276-02bb-4cea-a50e-36fc7d4689ae-trusted-ca-bundle\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.771054 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/30620276-02bb-4cea-a50e-36fc7d4689ae-image-import-ca\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.771492 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/81559ff3-95e9-455f-9d90-46c5f1a981ce-console-serving-cert\") pod \"console-f9d7485db-kl45g\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") " pod="openshift-console/console-f9d7485db-kl45g" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.771798 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/808ad22e-4629-4a7d-8613-b35fedecc2ed-serving-cert\") pod \"etcd-operator-b45778765-vgm6t\" (UID: \"808ad22e-4629-4a7d-8613-b35fedecc2ed\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.771870 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/808ad22e-4629-4a7d-8613-b35fedecc2ed-config\") pod \"etcd-operator-b45778765-vgm6t\" (UID: \"808ad22e-4629-4a7d-8613-b35fedecc2ed\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.772322 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f645aac4-2afb-45b9-8f28-dca8259c7278-config\") pod \"console-operator-58897d9998-hgwwp\" (UID: \"f645aac4-2afb-45b9-8f28-dca8259c7278\") " pod="openshift-console-operator/console-operator-58897d9998-hgwwp" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.772595 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ff4cb9cc-d164-4771-8e5f-41acc28c25e6-metrics-tls\") pod \"ingress-operator-5b745b69d9-22m6s\" (UID: \"ff4cb9cc-d164-4771-8e5f-41acc28c25e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-22m6s" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.772863 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/db00623b-8c6a-45d1-ab8b-a7e4f81f64eb-metrics-certs\") pod \"router-default-5444994796-glqcf\" (UID: \"db00623b-8c6a-45d1-ab8b-a7e4f81f64eb\") " pod="openshift-ingress/router-default-5444994796-glqcf" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.772889 4631 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication"/"audit" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.773150 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rsqp8"] Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.773178 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-z2968"] Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.773667 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/808ad22e-4629-4a7d-8613-b35fedecc2ed-etcd-ca\") pod \"etcd-operator-b45778765-vgm6t\" (UID: \"808ad22e-4629-4a7d-8613-b35fedecc2ed\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.775829 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/85d00b11-ed99-44ac-81b8-73d958bc4d3e-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-5vvr2\" (UID: \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.776443 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c436effe-de7b-4e3b-a61a-0ff4a7067363-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-rsqp8\" (UID: \"c436effe-de7b-4e3b-a61a-0ff4a7067363\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rsqp8" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.778220 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-audit-policies\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.778912 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85d00b11-ed99-44ac-81b8-73d958bc4d3e-serving-cert\") pod \"controller-manager-879f6c89f-5vvr2\" (UID: \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.781071 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/db00623b-8c6a-45d1-ab8b-a7e4f81f64eb-default-certificate\") pod \"router-default-5444994796-glqcf\" (UID: \"db00623b-8c6a-45d1-ab8b-a7e4f81f64eb\") " pod="openshift-ingress/router-default-5444994796-glqcf" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.783289 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6f09a1b-f9c2-43ec-8222-1fa25a379095-serving-cert\") pod \"route-controller-manager-6576b87f9c-hdlxr\" (UID: \"f6f09a1b-f9c2-43ec-8222-1fa25a379095\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.783993 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/30620276-02bb-4cea-a50e-36fc7d4689ae-etcd-serving-ca\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.784394 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-oauth-serving-cert\") pod \"console-f9d7485db-kl45g\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") " pod="openshift-console/console-f9d7485db-kl45g" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.784419 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6f09a1b-f9c2-43ec-8222-1fa25a379095-config\") pod \"route-controller-manager-6576b87f9c-hdlxr\" (UID: \"f6f09a1b-f9c2-43ec-8222-1fa25a379095\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.784599 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/30620276-02bb-4cea-a50e-36fc7d4689ae-serving-cert\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.786270 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/81559ff3-95e9-455f-9d90-46c5f1a981ce-console-oauth-config\") pod \"console-f9d7485db-kl45g\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") " pod="openshift-console/console-f9d7485db-kl45g" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.789351 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.789428 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/db00623b-8c6a-45d1-ab8b-a7e4f81f64eb-stats-auth\") pod \"router-default-5444994796-glqcf\" (UID: \"db00623b-8c6a-45d1-ab8b-a7e4f81f64eb\") " pod="openshift-ingress/router-default-5444994796-glqcf" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.790301 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/30620276-02bb-4cea-a50e-36fc7d4689ae-encryption-config\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.792765 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/4ef93ccc-0327-4a63-8f44-4cb3f9e1ba83-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-9qbmf\" (UID: \"4ef93ccc-0327-4a63-8f44-4cb3f9e1ba83\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9qbmf" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.796906 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e59edbf8-41cb-4d41-b012-a5e2dcf83df4-config\") pod \"kube-controller-manager-operator-78b949d7b-br8kn\" (UID: 
\"e59edbf8-41cb-4d41-b012-a5e2dcf83df4\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-br8kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.825990 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ff4cb9cc-d164-4771-8e5f-41acc28c25e6-trusted-ca\") pod \"ingress-operator-5b745b69d9-22m6s\" (UID: \"ff4cb9cc-d164-4771-8e5f-41acc28c25e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-22m6s" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.826527 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/808ad22e-4629-4a7d-8613-b35fedecc2ed-etcd-client\") pod \"etcd-operator-b45778765-vgm6t\" (UID: \"808ad22e-4629-4a7d-8613-b35fedecc2ed\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.827142 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85d00b11-ed99-44ac-81b8-73d958bc4d3e-config\") pod \"controller-manager-879f6c89f-5vvr2\" (UID: \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.829260 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-vgm6t"] Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.829405 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-22m6s"] Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.829485 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-br8kn"] Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.831440 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-g65qn"] Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.837905 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-h4gb7"] Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.839574 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-hc4kn"] Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.840701 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m8hln"] Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.841736 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-68slc"] Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.842735 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.843026 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.843166 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.844026 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-authentication/oauth-openshift-558db77b4-ctqxg"] Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.844124 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/30620276-02bb-4cea-a50e-36fc7d4689ae-etcd-client\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.853848 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-pqqkz"] Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.854156 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9qbmf"] Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.854334 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-gswpr"] Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.856309 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.860764 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.861134 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-h5fb2"] Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.868683 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-h5fb2" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.870845 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.873423 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-vndv7"] Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.874208 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-vndv7"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.875083 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-j7kc7"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.881706 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.896117 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w2p6c"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.896153 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.896164 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vtdlj"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.896173 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-swgqn"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.896183 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-qxmc5"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.896192 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-hgwwp"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.896201 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kkczr"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.888313 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.896283 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-j7kc7"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.899793 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-96c5q"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.901586 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.903401 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.906692 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fgbnd"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.908020 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6lshd"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.909717 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.910978 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.911749 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.912606 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-h5fb2"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.915182 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.917099 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.917692 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.917954 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-mdxjh"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.918997 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-sgr7v"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.919094 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-mdxjh"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.921453 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j54mq"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.922776 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d97wj"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.923625 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-g4684"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.924537 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.924853 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-kl45g"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.926065 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-j7kc7"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.927072 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cpw5s"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.928863 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-mdxjh"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.929452 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhq8j"]
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.930607 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.938699 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.952854 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.962531 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.973754 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.978230 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.986172 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:36 crc kubenswrapper[4631]: I1204 17:30:36.998807 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.024910 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.026313 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.038222 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.077793 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.098397 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.119176 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.138541 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.158401 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.198362 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.234294 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzhcm\" (UniqueName: \"kubernetes.io/projected/9a38e196-88e0-4add-8e52-40b1d8eb79e9-kube-api-access-zzhcm\") pod \"machine-api-operator-5694c8668f-pdgsq\" (UID: \"9a38e196-88e0-4add-8e52-40b1d8eb79e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pdgsq"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.253777 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4rxv\" (UniqueName: \"kubernetes.io/projected/dbc61eb8-0a7c-4ea2-8f50-5a2522daa465-kube-api-access-l4rxv\") pod \"openshift-config-operator-7777fb866f-bvw8n\" (UID: \"dbc61eb8-0a7c-4ea2-8f50-5a2522daa465\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.257850 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.279008 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.297754 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.318085 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.336753 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.357718 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.378430 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.398147 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.398150 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.421335 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.433210 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-pdgsq"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.438474 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.457747 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.477928 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.499760 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.518729 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.537898 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.558346 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.578182 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.597539 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.618509 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n"]
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.619844 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.640555 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.640890 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-pdgsq"]
Dec 04 17:30:37 crc kubenswrapper[4631]: W1204 17:30:37.648452 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9a38e196_88e0_4add_8e52_40b1d8eb79e9.slice/crio-ba74d181b7bfd5e018534b8a0b602649d2982ea43b41574a2f7809a67e757c6e WatchSource:0}: Error finding container ba74d181b7bfd5e018534b8a0b602649d2982ea43b41574a2f7809a67e757c6e: Status 404 returned error can't find the container with id ba74d181b7bfd5e018534b8a0b602649d2982ea43b41574a2f7809a67e757c6e
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.653508 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n" event={"ID":"dbc61eb8-0a7c-4ea2-8f50-5a2522daa465","Type":"ContainerStarted","Data":"5bb67a757caf17da5b4ba714fd583cd6b478da54907b6cab6b210c8cf246076b"}
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.654729 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-pdgsq" event={"ID":"9a38e196-88e0-4add-8e52-40b1d8eb79e9","Type":"ContainerStarted","Data":"ba74d181b7bfd5e018534b8a0b602649d2982ea43b41574a2f7809a67e757c6e"}
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.655906 4631 request.go:700] Waited for 1.000286837s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/secrets?fieldSelector=metadata.name%3Dolm-operator-serviceaccount-dockercfg-rq7zk&limit=500&resourceVersion=0
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.657761 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.678090 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.697734 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.721295 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.747114 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.758128 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.777568 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.798344 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.818401 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.837940 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.857681 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.877923 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.898213 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.918284 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.931039 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.938654 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.958302 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.977913 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Dec 04 17:30:37 crc kubenswrapper[4631]: I1204 17:30:37.998256 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.017762 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.037716 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.057840 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.077418 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.097527 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.117842 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.137782 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.158828 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.177975 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.206783 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.218470 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.238200 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.258500 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.277905 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.317942 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gg7s\" (UniqueName: \"kubernetes.io/projected/f645aac4-2afb-45b9-8f28-dca8259c7278-kube-api-access-4gg7s\") pod \"console-operator-58897d9998-hgwwp\" (UID: \"f645aac4-2afb-45b9-8f28-dca8259c7278\") " pod="openshift-console-operator/console-operator-58897d9998-hgwwp"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.336615 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ff4cb9cc-d164-4771-8e5f-41acc28c25e6-bound-sa-token\") pod \"ingress-operator-5b745b69d9-22m6s\" (UID: \"ff4cb9cc-d164-4771-8e5f-41acc28c25e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-22m6s"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.359464 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbdv8\" (UniqueName: \"kubernetes.io/projected/81559ff3-95e9-455f-9d90-46c5f1a981ce-kube-api-access-wbdv8\") pod \"console-f9d7485db-kl45g\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") " pod="openshift-console/console-f9d7485db-kl45g"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.374634 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5m6j\" (UniqueName: \"kubernetes.io/projected/832113b6-ef23-4863-8080-cea0494584e7-kube-api-access-q5m6j\") pod \"downloads-7954f5f757-g65qn\" (UID: \"832113b6-ef23-4863-8080-cea0494584e7\") " pod="openshift-console/downloads-7954f5f757-g65qn"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.390815 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b65mk\" (UniqueName: \"kubernetes.io/projected/f6f09a1b-f9c2-43ec-8222-1fa25a379095-kube-api-access-b65mk\") pod \"route-controller-manager-6576b87f9c-hdlxr\" (UID: \"f6f09a1b-f9c2-43ec-8222-1fa25a379095\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.410806 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfqzx\" (UniqueName: \"kubernetes.io/projected/ff4cb9cc-d164-4771-8e5f-41acc28c25e6-kube-api-access-cfqzx\") pod \"ingress-operator-5b745b69d9-22m6s\" (UID: \"ff4cb9cc-d164-4771-8e5f-41acc28c25e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-22m6s"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.434245 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klrg9\" (UniqueName: \"kubernetes.io/projected/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-kube-api-access-klrg9\") pod \"oauth-openshift-558db77b4-ctqxg\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.455217 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.460255 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4npp2\" (UniqueName: \"kubernetes.io/projected/db00623b-8c6a-45d1-ab8b-a7e4f81f64eb-kube-api-access-4npp2\") pod \"router-default-5444994796-glqcf\" (UID: \"db00623b-8c6a-45d1-ab8b-a7e4f81f64eb\") " pod="openshift-ingress/router-default-5444994796-glqcf"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.460413 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-hgwwp"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.464317 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-g65qn"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.478542 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-kl45g"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.509912 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42w5f\" (UniqueName: \"kubernetes.io/projected/c436effe-de7b-4e3b-a61a-0ff4a7067363-kube-api-access-42w5f\") pod \"openshift-controller-manager-operator-756b6f6bc6-rsqp8\" (UID: \"c436effe-de7b-4e3b-a61a-0ff4a7067363\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rsqp8"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.517100 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njcf7\" (UniqueName: \"kubernetes.io/projected/85d00b11-ed99-44ac-81b8-73d958bc4d3e-kube-api-access-njcf7\") pod \"controller-manager-879f6c89f-5vvr2\" (UID: \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.546711 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2t6n\" (UniqueName: \"kubernetes.io/projected/30620276-02bb-4cea-a50e-36fc7d4689ae-kube-api-access-q2t6n\") pod \"apiserver-76f77b778f-hc4kn\" (UID: \"30620276-02bb-4cea-a50e-36fc7d4689ae\") " pod="openshift-apiserver/apiserver-76f77b778f-hc4kn"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.572679 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvlbb\" (UniqueName: \"kubernetes.io/projected/808ad22e-4629-4a7d-8613-b35fedecc2ed-kube-api-access-gvlbb\") pod \"etcd-operator-b45778765-vgm6t\" (UID: \"808ad22e-4629-4a7d-8613-b35fedecc2ed\") " pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.581815 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.588080 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbt6f\" (UniqueName: \"kubernetes.io/projected/4ef93ccc-0327-4a63-8f44-4cb3f9e1ba83-kube-api-access-jbt6f\") pod \"cluster-samples-operator-665b6dd947-9qbmf\" (UID: \"4ef93ccc-0327-4a63-8f44-4cb3f9e1ba83\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9qbmf"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.598537 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.618766 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.639173 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.643453 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-hc4kn"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.658424 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.662417 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-glqcf"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.677233 4631 request.go:700] Waited for 1.802739518s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dmachine-config-server-dockercfg-qx5rd&limit=500&resourceVersion=0
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.677395 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.679300 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.692931 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-ctqxg"]
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.697672 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.698077 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.699777 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-22m6s"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.708421 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rsqp8"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.713490 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-g65qn"]
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.717976 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.737893 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.758241 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.766129 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e59edbf8-41cb-4d41-b012-a5e2dcf83df4-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-br8kn\" (UID: \"e59edbf8-41cb-4d41-b012-a5e2dcf83df4\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-br8kn"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.769151 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs\") pod \"network-metrics-daemon-8kcrj\" (UID: \"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\") " pod="openshift-multus/network-metrics-daemon-8kcrj"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.775654 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/86903bd1-674d-4fa2-b9d1-dbc8f347b72b-metrics-certs\") pod \"network-metrics-daemon-8kcrj\" (UID: \"86903bd1-674d-4fa2-b9d1-dbc8f347b72b\") " pod="openshift-multus/network-metrics-daemon-8kcrj"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.778671 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.798336 4631 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.817992 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.826019 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t"
Dec 04 17:30:38 crc kubenswrapper[4631]: W1204 17:30:38.829417 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddb00623b_8c6a_45d1_ab8b_a7e4f81f64eb.slice/crio-3c34c02a580eb51c40fd270e4e93811362b8b397abb1e6703aa8ee942bf438b0 WatchSource:0}: Error finding container 3c34c02a580eb51c40fd270e4e93811362b8b397abb1e6703aa8ee942bf438b0: Status 404 returned error can't find the container with id 3c34c02a580eb51c40fd270e4e93811362b8b397abb1e6703aa8ee942bf438b0
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.837419 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9qbmf"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.870223 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d253a0a7-4cc1-472f-b741-76da282b7672-config\") pod \"kube-apiserver-operator-766d6c64bb-cpw5s\" (UID: \"d253a0a7-4cc1-472f-b741-76da282b7672\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cpw5s"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.870262 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d253a0a7-4cc1-472f-b741-76da282b7672-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-cpw5s\" (UID: \"d253a0a7-4cc1-472f-b741-76da282b7672\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cpw5s"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.870286 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37b1b8a1-6ccc-4600-9430-32196ec1c9a4-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w2p6c\" (UID: \"37b1b8a1-6ccc-4600-9430-32196ec1c9a4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w2p6c"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.870345 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/37b1b8a1-6ccc-4600-9430-32196ec1c9a4-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w2p6c\" (UID: \"37b1b8a1-6ccc-4600-9430-32196ec1c9a4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w2p6c"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.870364 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87263d28-d75f-4701-9ac1-576084547adf-metrics-tls\") pod \"dns-operator-744455d44c-swgqn\" (UID: \"87263d28-d75f-4701-9ac1-576084547adf\") " pod="openshift-dns-operator/dns-operator-744455d44c-swgqn"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.870938 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/52770a8d-d215-4fa8-8469-95a315e44850-registry-certificates\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871020 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/925f71fa-9882-47d0-9708-0c34ebb51df8-encryption-config\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871099 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/543fff0a-4d3c-4bfd-9239-32a0f80a4092-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-h4gb7\" (UID: \"543fff0a-4d3c-4bfd-9239-32a0f80a4092\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-h4gb7"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871160 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psg4c\" (UniqueName: \"kubernetes.io/projected/543fff0a-4d3c-4bfd-9239-32a0f80a4092-kube-api-access-psg4c\") pod \"cluster-image-registry-operator-dc59b4c8b-h4gb7\" (UID: \"543fff0a-4d3c-4bfd-9239-32a0f80a4092\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-h4gb7"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871218 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4758s\" (UniqueName: \"kubernetes.io/projected/3c27a704-3b03-4210-bbbf-7179dbfe39ff-kube-api-access-4758s\") pod \"authentication-operator-69f744f599-z2968\" (UID: \"3c27a704-3b03-4210-bbbf-7179dbfe39ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-z2968"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871243 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/925f71fa-9882-47d0-9708-0c34ebb51df8-etcd-client\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871294 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/925f71fa-9882-47d0-9708-0c34ebb51df8-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871333 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/925f71fa-9882-47d0-9708-0c34ebb51df8-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871418 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9q77\" (UniqueName: \"kubernetes.io/projected/87263d28-d75f-4701-9ac1-576084547adf-kube-api-access-c9q77\") pod \"dns-operator-744455d44c-swgqn\" (UID: \"87263d28-d75f-4701-9ac1-576084547adf\") " pod="openshift-dns-operator/dns-operator-744455d44c-swgqn"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871460 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/52770a8d-d215-4fa8-8469-95a315e44850-registry-tls\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871495 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/37b1b8a1-6ccc-4600-9430-32196ec1c9a4-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w2p6c\" (UID: \"37b1b8a1-6ccc-4600-9430-32196ec1c9a4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w2p6c"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871522 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c27a704-3b03-4210-bbbf-7179dbfe39ff-config\") pod \"authentication-operator-69f744f599-z2968\" (UID: \"3c27a704-3b03-4210-bbbf-7179dbfe39ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-z2968"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871560 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3c27a704-3b03-4210-bbbf-7179dbfe39ff-service-ca-bundle\") pod \"authentication-operator-69f744f599-z2968\" (UID: \"3c27a704-3b03-4210-bbbf-7179dbfe39ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-z2968"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871579 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d253a0a7-4cc1-472f-b741-76da282b7672-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-cpw5s\" (UID: \"d253a0a7-4cc1-472f-b741-76da282b7672\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cpw5s"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871603 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/925f71fa-9882-47d0-9708-0c34ebb51df8-audit-policies\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871622 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/925f71fa-9882-47d0-9708-0c34ebb51df8-audit-dir\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871645 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3c27a704-3b03-4210-bbbf-7179dbfe39ff-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-z2968\" (UID: \"3c27a704-3b03-4210-bbbf-7179dbfe39ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-z2968"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871669 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/543fff0a-4d3c-4bfd-9239-32a0f80a4092-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-h4gb7\" (UID: \"543fff0a-4d3c-4bfd-9239-32a0f80a4092\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-h4gb7"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871724 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdbcd\" (UniqueName: \"kubernetes.io/projected/52770a8d-d215-4fa8-8469-95a315e44850-kube-api-access-jdbcd\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871747 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/52770a8d-d215-4fa8-8469-95a315e44850-installation-pull-secrets\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871809 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871898 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3c27a704-3b03-4210-bbbf-7179dbfe39ff-serving-cert\") pod \"authentication-operator-69f744f599-z2968\" (UID: \"3c27a704-3b03-4210-bbbf-7179dbfe39ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-z2968"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871919 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlsv7\" (UniqueName: \"kubernetes.io/projected/925f71fa-9882-47d0-9708-0c34ebb51df8-kube-api-access-vlsv7\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871936 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/543fff0a-4d3c-4bfd-9239-32a0f80a4092-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-h4gb7\" (UID: \"543fff0a-4d3c-4bfd-9239-32a0f80a4092\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-h4gb7"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.871960 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/52770a8d-d215-4fa8-8469-95a315e44850-trusted-ca\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.872016 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/52770a8d-d215-4fa8-8469-95a315e44850-ca-trust-extracted\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.872032 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/52770a8d-d215-4fa8-8469-95a315e44850-bound-sa-token\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.872046 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/925f71fa-9882-47d0-9708-0c34ebb51df8-serving-cert\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j"
Dec 04 17:30:38 crc kubenswrapper[4631]: E1204 17:30:38.878878 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:39.378846796 +0000 UTC m=+169.411088794 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.934175 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-hgwwp"]
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.953499 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8kcrj"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.973352 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.974236 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87263d28-d75f-4701-9ac1-576084547adf-metrics-tls\") pod \"dns-operator-744455d44c-swgqn\" (UID: \"87263d28-d75f-4701-9ac1-576084547adf\") " pod="openshift-dns-operator/dns-operator-744455d44c-swgqn"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.974269 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfhjw\" (UniqueName: \"kubernetes.io/projected/446a34a0-fac2-477b-8186-db8ec5fbdf5c-kube-api-access-lfhjw\") pod \"packageserver-d55dfcdfc-xhq8j\" (UID: \"446a34a0-fac2-477b-8186-db8ec5fbdf5c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhq8j"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.974296 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0e506b06-8627-4340-b64b-13a003f52d0d-signing-cabundle\") pod \"service-ca-9c57cc56f-pqqkz\" (UID: \"0e506b06-8627-4340-b64b-13a003f52d0d\") " pod="openshift-service-ca/service-ca-9c57cc56f-pqqkz"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.974314 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/925f71fa-9882-47d0-9708-0c34ebb51df8-encryption-config\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j"
Dec 04 17:30:38 crc kubenswrapper[4631]: E1204 17:30:38.974331 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:39.47431206 +0000 UTC m=+169.506554058 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.974382 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzflr\" (UniqueName: \"kubernetes.io/projected/f495761a-e318-4f2a-a41a-7baa61110fe7-kube-api-access-vzflr\") pod \"package-server-manager-789f6589d5-kkczr\" (UID: \"f495761a-e318-4f2a-a41a-7baa61110fe7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kkczr"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.974415 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/37486438-b8c6-4b20-911f-f6e66393f414-auth-proxy-config\") pod \"machine-config-operator-74547568cd-qxmc5\" (UID: \"37486438-b8c6-4b20-911f-f6e66393f414\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qxmc5"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.974443 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/6689754f-f13b-48f7-89a6-353ef119cd75-srv-cert\") pod \"catalog-operator-68c6474976-vtdlj\" (UID: \"6689754f-f13b-48f7-89a6-353ef119cd75\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vtdlj"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.974470 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daf920af-78e8-4fab-893e-f9a95d7315fe-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-fgbnd\" (UID: \"daf920af-78e8-4fab-893e-f9a95d7315fe\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fgbnd"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.974511 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/925f71fa-9882-47d0-9708-0c34ebb51df8-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.974534 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/925f71fa-9882-47d0-9708-0c34ebb51df8-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.974549 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4212359b-081e-4b11-8ca7-87cb9ff33a1c-secret-volume\") pod \"collect-profiles-29414490-4s8gs\" (UID: \"4212359b-081e-4b11-8ca7-87cb9ff33a1c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.974565 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/200bbd12-8133-46c5-a69f-ed2dd0b5e191-registration-dir\") pod \"csi-hostpathplugin-mdxjh\" (UID: \"200bbd12-8133-46c5-a69f-ed2dd0b5e191\") " pod="hostpath-provisioner/csi-hostpathplugin-mdxjh"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.974581 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c6867584-8090-43a9-bf55-f3fda830e5f0-proxy-tls\") pod \"machine-config-controller-84d6567774-sgr7v\" (UID: \"c6867584-8090-43a9-bf55-f3fda830e5f0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sgr7v"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.974601 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/52770a8d-d215-4fa8-8469-95a315e44850-registry-tls\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.974615 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/37486438-b8c6-4b20-911f-f6e66393f414-images\") pod \"machine-config-operator-74547568cd-qxmc5\" (UID: \"37486438-b8c6-4b20-911f-f6e66393f414\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qxmc5"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.974632 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/daf920af-78e8-4fab-893e-f9a95d7315fe-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-fgbnd\" (UID: \"daf920af-78e8-4fab-893e-f9a95d7315fe\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fgbnd"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975079 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9409ba61-da1b-4587-8b49-c6f70b109e4d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-96c5q\" (UID: \"9409ba61-da1b-4587-8b49-c6f70b109e4d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-96c5q"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975110 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3c27a704-3b03-4210-bbbf-7179dbfe39ff-service-ca-bundle\") pod \"authentication-operator-69f744f599-z2968\" (UID: \"3c27a704-3b03-4210-bbbf-7179dbfe39ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-z2968"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975127 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wp8v8\" (UniqueName: \"kubernetes.io/projected/2eb4ea57-e198-45b1-8f63-d95d4a223362-kube-api-access-wp8v8\") pod \"service-ca-operator-777779d784-g4684\" (UID: \"2eb4ea57-e198-45b1-8f63-d95d4a223362\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-g4684"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975165 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjvp6\" (UniqueName: \"kubernetes.io/projected/db12e396-cc65-44e6-89f4-e4458958f443-kube-api-access-cjvp6\") pod \"migrator-59844c95c7-6lshd\" (UID: \"db12e396-cc65-44e6-89f4-e4458958f443\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6lshd"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975183 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/543fff0a-4d3c-4bfd-9239-32a0f80a4092-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-h4gb7\" (UID: \"543fff0a-4d3c-4bfd-9239-32a0f80a4092\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-h4gb7"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975210 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3c27a704-3b03-4210-bbbf-7179dbfe39ff-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-z2968\" (UID: \"3c27a704-3b03-4210-bbbf-7179dbfe39ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-z2968"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975264 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdbcd\" (UniqueName: \"kubernetes.io/projected/52770a8d-d215-4fa8-8469-95a315e44850-kube-api-access-jdbcd\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975300 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3c27a704-3b03-4210-bbbf-7179dbfe39ff-serving-cert\") pod \"authentication-operator-69f744f599-z2968\" (UID: \"3c27a704-3b03-4210-bbbf-7179dbfe39ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-z2968"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975315 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g49sm\" (UniqueName: \"kubernetes.io/projected/200bbd12-8133-46c5-a69f-ed2dd0b5e191-kube-api-access-g49sm\") pod \"csi-hostpathplugin-mdxjh\" (UID: \"200bbd12-8133-46c5-a69f-ed2dd0b5e191\") " pod="hostpath-provisioner/csi-hostpathplugin-mdxjh"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975343 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/446a34a0-fac2-477b-8186-db8ec5fbdf5c-tmpfs\") pod \"packageserver-d55dfcdfc-xhq8j\" (UID: \"446a34a0-fac2-477b-8186-db8ec5fbdf5c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhq8j"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975360 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbmq2\" (UniqueName: \"kubernetes.io/projected/c6867584-8090-43a9-bf55-f3fda830e5f0-kube-api-access-rbmq2\") pod \"machine-config-controller-84d6567774-sgr7v\" (UID: \"c6867584-8090-43a9-bf55-f3fda830e5f0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sgr7v"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975394 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1892d78f-ccb4-491b-8622-84340df88183-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-68slc\" (UID: \"1892d78f-ccb4-491b-8622-84340df88183\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-68slc"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975412 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7thzz\" (UniqueName: \"kubernetes.io/projected/0f92bdbc-4785-44bf-a91c-88fe53b02d2a-kube-api-access-7thzz\") pod \"control-plane-machine-set-operator-78cbb6b69f-gswpr\" (UID: \"0f92bdbc-4785-44bf-a91c-88fe53b02d2a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-gswpr"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975437 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz5jw\" (UniqueName: \"kubernetes.io/projected/6689754f-f13b-48f7-89a6-353ef119cd75-kube-api-access-tz5jw\") pod \"catalog-operator-68c6474976-vtdlj\" (UID: \"6689754f-f13b-48f7-89a6-353ef119cd75\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vtdlj"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975453 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/200bbd12-8133-46c5-a69f-ed2dd0b5e191-csi-data-dir\") pod \"csi-hostpathplugin-mdxjh\" (UID: \"200bbd12-8133-46c5-a69f-ed2dd0b5e191\") " pod="hostpath-provisioner/csi-hostpathplugin-mdxjh"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975469 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/496d955a-3328-4f38-86db-e5c382672b27-machine-approver-tls\") pod \"machine-approver-56656f9798-4wt5l\" (UID: \"496d955a-3328-4f38-86db-e5c382672b27\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4wt5l"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975508 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-d97wj\" (UID: \"dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a\") " pod="openshift-marketplace/marketplace-operator-79b997595-d97wj"
Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975574 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/52770a8d-d215-4fa8-8469-95a315e44850-ca-trust-extracted\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975590 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/52770a8d-d215-4fa8-8469-95a315e44850-bound-sa-token\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975605 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6b035e50-4c55-4081-8ff2-3f720a8fa1b4-config-volume\") pod \"dns-default-h5fb2\" (UID: \"6b035e50-4c55-4081-8ff2-3f720a8fa1b4\") " pod="openshift-dns/dns-default-h5fb2" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975639 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7b362a1b-58aa-418c-be9b-b58655c4fe77-srv-cert\") pod \"olm-operator-6b444d44fb-j54mq\" (UID: \"7b362a1b-58aa-418c-be9b-b58655c4fe77\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j54mq" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975858 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/0f92bdbc-4785-44bf-a91c-88fe53b02d2a-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-gswpr\" (UID: \"0f92bdbc-4785-44bf-a91c-88fe53b02d2a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-gswpr" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975876 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6b035e50-4c55-4081-8ff2-3f720a8fa1b4-metrics-tls\") pod \"dns-default-h5fb2\" (UID: \"6b035e50-4c55-4081-8ff2-3f720a8fa1b4\") " pod="openshift-dns/dns-default-h5fb2" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975912 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d253a0a7-4cc1-472f-b741-76da282b7672-config\") pod \"kube-apiserver-operator-766d6c64bb-cpw5s\" (UID: \"d253a0a7-4cc1-472f-b741-76da282b7672\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cpw5s" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975926 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d253a0a7-4cc1-472f-b741-76da282b7672-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-cpw5s\" (UID: \"d253a0a7-4cc1-472f-b741-76da282b7672\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cpw5s" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975946 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37b1b8a1-6ccc-4600-9430-32196ec1c9a4-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w2p6c\" (UID: 
\"37b1b8a1-6ccc-4600-9430-32196ec1c9a4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w2p6c" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975961 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/37b1b8a1-6ccc-4600-9430-32196ec1c9a4-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w2p6c\" (UID: \"37b1b8a1-6ccc-4600-9430-32196ec1c9a4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w2p6c" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975981 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhlqn\" (UniqueName: \"kubernetes.io/projected/4212359b-081e-4b11-8ca7-87cb9ff33a1c-kube-api-access-mhlqn\") pod \"collect-profiles-29414490-4s8gs\" (UID: \"4212359b-081e-4b11-8ca7-87cb9ff33a1c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.975998 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zm52l\" (UniqueName: \"kubernetes.io/projected/0e506b06-8627-4340-b64b-13a003f52d0d-kube-api-access-zm52l\") pod \"service-ca-9c57cc56f-pqqkz\" (UID: \"0e506b06-8627-4340-b64b-13a003f52d0d\") " pod="openshift-service-ca/service-ca-9c57cc56f-pqqkz" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976025 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/52770a8d-d215-4fa8-8469-95a315e44850-registry-certificates\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976067 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/543fff0a-4d3c-4bfd-9239-32a0f80a4092-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-h4gb7\" (UID: \"543fff0a-4d3c-4bfd-9239-32a0f80a4092\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-h4gb7" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976083 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psg4c\" (UniqueName: \"kubernetes.io/projected/543fff0a-4d3c-4bfd-9239-32a0f80a4092-kube-api-access-psg4c\") pod \"cluster-image-registry-operator-dc59b4c8b-h4gb7\" (UID: \"543fff0a-4d3c-4bfd-9239-32a0f80a4092\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-h4gb7" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976098 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0e506b06-8627-4340-b64b-13a003f52d0d-signing-key\") pod \"service-ca-9c57cc56f-pqqkz\" (UID: \"0e506b06-8627-4340-b64b-13a003f52d0d\") " pod="openshift-service-ca/service-ca-9c57cc56f-pqqkz" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976153 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/14c0c843-4b1a-46bf-8e2b-5a233cc8d6d6-certs\") pod \"machine-config-server-vndv7\" (UID: \"14c0c843-4b1a-46bf-8e2b-5a233cc8d6d6\") " 
pod="openshift-machine-config-operator/machine-config-server-vndv7" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976178 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4758s\" (UniqueName: \"kubernetes.io/projected/3c27a704-3b03-4210-bbbf-7179dbfe39ff-kube-api-access-4758s\") pod \"authentication-operator-69f744f599-z2968\" (UID: \"3c27a704-3b03-4210-bbbf-7179dbfe39ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-z2968" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976193 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/925f71fa-9882-47d0-9708-0c34ebb51df8-etcd-client\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976212 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rq89\" (UniqueName: \"kubernetes.io/projected/1892d78f-ccb4-491b-8622-84340df88183-kube-api-access-9rq89\") pod \"multus-admission-controller-857f4d67dd-68slc\" (UID: \"1892d78f-ccb4-491b-8622-84340df88183\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-68slc" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976231 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9q77\" (UniqueName: \"kubernetes.io/projected/87263d28-d75f-4701-9ac1-576084547adf-kube-api-access-c9q77\") pod \"dns-operator-744455d44c-swgqn\" (UID: \"87263d28-d75f-4701-9ac1-576084547adf\") " pod="openshift-dns-operator/dns-operator-744455d44c-swgqn" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976247 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/446a34a0-fac2-477b-8186-db8ec5fbdf5c-webhook-cert\") pod \"packageserver-d55dfcdfc-xhq8j\" (UID: \"446a34a0-fac2-477b-8186-db8ec5fbdf5c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhq8j" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976272 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/37b1b8a1-6ccc-4600-9430-32196ec1c9a4-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w2p6c\" (UID: \"37b1b8a1-6ccc-4600-9430-32196ec1c9a4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w2p6c" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976329 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c27a704-3b03-4210-bbbf-7179dbfe39ff-config\") pod \"authentication-operator-69f744f599-z2968\" (UID: \"3c27a704-3b03-4210-bbbf-7179dbfe39ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-z2968" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976381 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/6689754f-f13b-48f7-89a6-353ef119cd75-profile-collector-cert\") pod \"catalog-operator-68c6474976-vtdlj\" (UID: \"6689754f-f13b-48f7-89a6-353ef119cd75\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vtdlj" Dec 
04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976399 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7b362a1b-58aa-418c-be9b-b58655c4fe77-profile-collector-cert\") pod \"olm-operator-6b444d44fb-j54mq\" (UID: \"7b362a1b-58aa-418c-be9b-b58655c4fe77\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j54mq" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976441 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llffc\" (UniqueName: \"kubernetes.io/projected/6b035e50-4c55-4081-8ff2-3f720a8fa1b4-kube-api-access-llffc\") pod \"dns-default-h5fb2\" (UID: \"6b035e50-4c55-4081-8ff2-3f720a8fa1b4\") " pod="openshift-dns/dns-default-h5fb2" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976458 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d253a0a7-4cc1-472f-b741-76da282b7672-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-cpw5s\" (UID: \"d253a0a7-4cc1-472f-b741-76da282b7672\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cpw5s" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976475 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9409ba61-da1b-4587-8b49-c6f70b109e4d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-96c5q\" (UID: \"9409ba61-da1b-4587-8b49-c6f70b109e4d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-96c5q" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976490 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/200bbd12-8133-46c5-a69f-ed2dd0b5e191-mountpoint-dir\") pod \"csi-hostpathplugin-mdxjh\" (UID: \"200bbd12-8133-46c5-a69f-ed2dd0b5e191\") " pod="hostpath-provisioner/csi-hostpathplugin-mdxjh" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976507 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/925f71fa-9882-47d0-9708-0c34ebb51df8-audit-dir\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976542 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/925f71fa-9882-47d0-9708-0c34ebb51df8-audit-policies\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976568 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wj7m\" (UniqueName: \"kubernetes.io/projected/9409ba61-da1b-4587-8b49-c6f70b109e4d-kube-api-access-4wj7m\") pod \"openshift-apiserver-operator-796bbdcf4f-96c5q\" (UID: \"9409ba61-da1b-4587-8b49-c6f70b109e4d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-96c5q" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976583 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/200bbd12-8133-46c5-a69f-ed2dd0b5e191-plugins-dir\") pod \"csi-hostpathplugin-mdxjh\" (UID: \"200bbd12-8133-46c5-a69f-ed2dd0b5e191\") " pod="hostpath-provisioner/csi-hostpathplugin-mdxjh" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976598 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4212359b-081e-4b11-8ca7-87cb9ff33a1c-config-volume\") pod \"collect-profiles-29414490-4s8gs\" (UID: \"4212359b-081e-4b11-8ca7-87cb9ff33a1c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976624 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f4473144-1622-451f-94f6-b2d878657bfd-cert\") pod \"ingress-canary-j7kc7\" (UID: \"f4473144-1622-451f-94f6-b2d878657bfd\") " pod="openshift-ingress-canary/ingress-canary-j7kc7" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976640 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnrc2\" (UniqueName: \"kubernetes.io/projected/dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a-kube-api-access-fnrc2\") pod \"marketplace-operator-79b997595-d97wj\" (UID: \"dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a\") " pod="openshift-marketplace/marketplace-operator-79b997595-d97wj" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976664 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksbd6\" (UniqueName: \"kubernetes.io/projected/496d955a-3328-4f38-86db-e5c382672b27-kube-api-access-ksbd6\") pod \"machine-approver-56656f9798-4wt5l\" (UID: \"496d955a-3328-4f38-86db-e5c382672b27\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4wt5l" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976692 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/52770a8d-d215-4fa8-8469-95a315e44850-installation-pull-secrets\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976707 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2eb4ea57-e198-45b1-8f63-d95d4a223362-config\") pod \"service-ca-operator-777779d784-g4684\" (UID: \"2eb4ea57-e198-45b1-8f63-d95d4a223362\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-g4684" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976745 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976771 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/446a34a0-fac2-477b-8186-db8ec5fbdf5c-apiservice-cert\") pod \"packageserver-d55dfcdfc-xhq8j\" (UID: \"446a34a0-fac2-477b-8186-db8ec5fbdf5c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhq8j" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976789 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/496d955a-3328-4f38-86db-e5c382672b27-auth-proxy-config\") pod \"machine-approver-56656f9798-4wt5l\" (UID: \"496d955a-3328-4f38-86db-e5c382672b27\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4wt5l" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976836 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlsv7\" (UniqueName: \"kubernetes.io/projected/925f71fa-9882-47d0-9708-0c34ebb51df8-kube-api-access-vlsv7\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976853 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/543fff0a-4d3c-4bfd-9239-32a0f80a4092-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-h4gb7\" (UID: \"543fff0a-4d3c-4bfd-9239-32a0f80a4092\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-h4gb7" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976869 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/14c0c843-4b1a-46bf-8e2b-5a233cc8d6d6-node-bootstrap-token\") pod \"machine-config-server-vndv7\" (UID: \"14c0c843-4b1a-46bf-8e2b-5a233cc8d6d6\") " pod="openshift-machine-config-operator/machine-config-server-vndv7" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976884 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26vhf\" (UniqueName: \"kubernetes.io/projected/7b362a1b-58aa-418c-be9b-b58655c4fe77-kube-api-access-26vhf\") pod \"olm-operator-6b444d44fb-j54mq\" (UID: \"7b362a1b-58aa-418c-be9b-b58655c4fe77\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j54mq" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976899 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4h27\" (UniqueName: \"kubernetes.io/projected/14c0c843-4b1a-46bf-8e2b-5a233cc8d6d6-kube-api-access-v4h27\") pod \"machine-config-server-vndv7\" (UID: \"14c0c843-4b1a-46bf-8e2b-5a233cc8d6d6\") " pod="openshift-machine-config-operator/machine-config-server-vndv7" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976957 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/52770a8d-d215-4fa8-8469-95a315e44850-trusted-ca\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.976989 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/496d955a-3328-4f38-86db-e5c382672b27-config\") pod \"machine-approver-56656f9798-4wt5l\" (UID: \"496d955a-3328-4f38-86db-e5c382672b27\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4wt5l" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.977004 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/f495761a-e318-4f2a-a41a-7baa61110fe7-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-kkczr\" (UID: \"f495761a-e318-4f2a-a41a-7baa61110fe7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kkczr" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.977021 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rw8x5\" (UniqueName: \"kubernetes.io/projected/37486438-b8c6-4b20-911f-f6e66393f414-kube-api-access-rw8x5\") pod \"machine-config-operator-74547568cd-qxmc5\" (UID: \"37486438-b8c6-4b20-911f-f6e66393f414\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qxmc5" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.977041 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/925f71fa-9882-47d0-9708-0c34ebb51df8-serving-cert\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.977056 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2eb4ea57-e198-45b1-8f63-d95d4a223362-serving-cert\") pod \"service-ca-operator-777779d784-g4684\" (UID: \"2eb4ea57-e198-45b1-8f63-d95d4a223362\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-g4684" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.977074 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-d97wj\" (UID: \"dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a\") " pod="openshift-marketplace/marketplace-operator-79b997595-d97wj" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.977090 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c6867584-8090-43a9-bf55-f3fda830e5f0-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-sgr7v\" (UID: \"c6867584-8090-43a9-bf55-f3fda830e5f0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sgr7v" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.977129 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/200bbd12-8133-46c5-a69f-ed2dd0b5e191-socket-dir\") pod \"csi-hostpathplugin-mdxjh\" (UID: \"200bbd12-8133-46c5-a69f-ed2dd0b5e191\") " pod="hostpath-provisioner/csi-hostpathplugin-mdxjh" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.977154 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" 
(UniqueName: \"kubernetes.io/secret/37486438-b8c6-4b20-911f-f6e66393f414-proxy-tls\") pod \"machine-config-operator-74547568cd-qxmc5\" (UID: \"37486438-b8c6-4b20-911f-f6e66393f414\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qxmc5" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.977184 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-548f4\" (UniqueName: \"kubernetes.io/projected/daf920af-78e8-4fab-893e-f9a95d7315fe-kube-api-access-548f4\") pod \"kube-storage-version-migrator-operator-b67b599dd-fgbnd\" (UID: \"daf920af-78e8-4fab-893e-f9a95d7315fe\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fgbnd" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.977200 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnfg2\" (UniqueName: \"kubernetes.io/projected/f4473144-1622-451f-94f6-b2d878657bfd-kube-api-access-dnfg2\") pod \"ingress-canary-j7kc7\" (UID: \"f4473144-1622-451f-94f6-b2d878657bfd\") " pod="openshift-ingress-canary/ingress-canary-j7kc7" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.980257 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/925f71fa-9882-47d0-9708-0c34ebb51df8-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.980940 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/925f71fa-9882-47d0-9708-0c34ebb51df8-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.981591 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3c27a704-3b03-4210-bbbf-7179dbfe39ff-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-z2968\" (UID: \"3c27a704-3b03-4210-bbbf-7179dbfe39ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-z2968" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.984471 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/52770a8d-d215-4fa8-8469-95a315e44850-ca-trust-extracted\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:38 crc kubenswrapper[4631]: E1204 17:30:38.985598 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:39.485583315 +0000 UTC m=+169.517825313 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.993571 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c27a704-3b03-4210-bbbf-7179dbfe39ff-config\") pod \"authentication-operator-69f744f599-z2968\" (UID: \"3c27a704-3b03-4210-bbbf-7179dbfe39ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-z2968" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.995019 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d253a0a7-4cc1-472f-b741-76da282b7672-config\") pod \"kube-apiserver-operator-766d6c64bb-cpw5s\" (UID: \"d253a0a7-4cc1-472f-b741-76da282b7672\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cpw5s" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.997087 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87263d28-d75f-4701-9ac1-576084547adf-metrics-tls\") pod \"dns-operator-744455d44c-swgqn\" (UID: \"87263d28-d75f-4701-9ac1-576084547adf\") " pod="openshift-dns-operator/dns-operator-744455d44c-swgqn" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.997290 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/925f71fa-9882-47d0-9708-0c34ebb51df8-encryption-config\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.998475 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/925f71fa-9882-47d0-9708-0c34ebb51df8-audit-dir\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j" Dec 04 17:30:38 crc kubenswrapper[4631]: I1204 17:30:38.997503 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/52770a8d-d215-4fa8-8469-95a315e44850-trusted-ca\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.001111 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/925f71fa-9882-47d0-9708-0c34ebb51df8-audit-policies\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.002692 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/543fff0a-4d3c-4bfd-9239-32a0f80a4092-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-h4gb7\" (UID: 
\"543fff0a-4d3c-4bfd-9239-32a0f80a4092\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-h4gb7" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.002978 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/52770a8d-d215-4fa8-8469-95a315e44850-registry-certificates\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.003037 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3c27a704-3b03-4210-bbbf-7179dbfe39ff-service-ca-bundle\") pod \"authentication-operator-69f744f599-z2968\" (UID: \"3c27a704-3b03-4210-bbbf-7179dbfe39ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-z2968" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.003275 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37b1b8a1-6ccc-4600-9430-32196ec1c9a4-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w2p6c\" (UID: \"37b1b8a1-6ccc-4600-9430-32196ec1c9a4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w2p6c" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.005637 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/52770a8d-d215-4fa8-8469-95a315e44850-installation-pull-secrets\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.010828 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/925f71fa-9882-47d0-9708-0c34ebb51df8-serving-cert\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.010865 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d253a0a7-4cc1-472f-b741-76da282b7672-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-cpw5s\" (UID: \"d253a0a7-4cc1-472f-b741-76da282b7672\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cpw5s" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.011969 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3c27a704-3b03-4210-bbbf-7179dbfe39ff-serving-cert\") pod \"authentication-operator-69f744f599-z2968\" (UID: \"3c27a704-3b03-4210-bbbf-7179dbfe39ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-z2968" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.012757 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/543fff0a-4d3c-4bfd-9239-32a0f80a4092-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-h4gb7\" (UID: \"543fff0a-4d3c-4bfd-9239-32a0f80a4092\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-h4gb7" Dec 04 17:30:39 crc 
kubenswrapper[4631]: I1204 17:30:39.013675 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/37b1b8a1-6ccc-4600-9430-32196ec1c9a4-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w2p6c\" (UID: \"37b1b8a1-6ccc-4600-9430-32196ec1c9a4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w2p6c" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.014640 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/925f71fa-9882-47d0-9708-0c34ebb51df8-etcd-client\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.015615 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/543fff0a-4d3c-4bfd-9239-32a0f80a4092-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-h4gb7\" (UID: \"543fff0a-4d3c-4bfd-9239-32a0f80a4092\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-h4gb7" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.028055 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/52770a8d-d215-4fa8-8469-95a315e44850-registry-tls\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.034051 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-kl45g"] Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.041289 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4758s\" (UniqueName: \"kubernetes.io/projected/3c27a704-3b03-4210-bbbf-7179dbfe39ff-kube-api-access-4758s\") pod \"authentication-operator-69f744f599-z2968\" (UID: \"3c27a704-3b03-4210-bbbf-7179dbfe39ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-z2968" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.048840 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-br8kn" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.063960 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdbcd\" (UniqueName: \"kubernetes.io/projected/52770a8d-d215-4fa8-8469-95a315e44850-kube-api-access-jdbcd\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.079559 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.079866 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfhjw\" (UniqueName: \"kubernetes.io/projected/446a34a0-fac2-477b-8186-db8ec5fbdf5c-kube-api-access-lfhjw\") pod \"packageserver-d55dfcdfc-xhq8j\" (UID: \"446a34a0-fac2-477b-8186-db8ec5fbdf5c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhq8j" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.079896 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0e506b06-8627-4340-b64b-13a003f52d0d-signing-cabundle\") pod \"service-ca-9c57cc56f-pqqkz\" (UID: \"0e506b06-8627-4340-b64b-13a003f52d0d\") " pod="openshift-service-ca/service-ca-9c57cc56f-pqqkz" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.079922 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzflr\" (UniqueName: \"kubernetes.io/projected/f495761a-e318-4f2a-a41a-7baa61110fe7-kube-api-access-vzflr\") pod \"package-server-manager-789f6589d5-kkczr\" (UID: \"f495761a-e318-4f2a-a41a-7baa61110fe7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kkczr" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.079948 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/37486438-b8c6-4b20-911f-f6e66393f414-auth-proxy-config\") pod \"machine-config-operator-74547568cd-qxmc5\" (UID: \"37486438-b8c6-4b20-911f-f6e66393f414\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qxmc5" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080003 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/6689754f-f13b-48f7-89a6-353ef119cd75-srv-cert\") pod \"catalog-operator-68c6474976-vtdlj\" (UID: \"6689754f-f13b-48f7-89a6-353ef119cd75\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vtdlj" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080033 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daf920af-78e8-4fab-893e-f9a95d7315fe-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-fgbnd\" (UID: \"daf920af-78e8-4fab-893e-f9a95d7315fe\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fgbnd" Dec 04 17:30:39 
crc kubenswrapper[4631]: I1204 17:30:39.080057 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4212359b-081e-4b11-8ca7-87cb9ff33a1c-secret-volume\") pod \"collect-profiles-29414490-4s8gs\" (UID: \"4212359b-081e-4b11-8ca7-87cb9ff33a1c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080082 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/200bbd12-8133-46c5-a69f-ed2dd0b5e191-registration-dir\") pod \"csi-hostpathplugin-mdxjh\" (UID: \"200bbd12-8133-46c5-a69f-ed2dd0b5e191\") " pod="hostpath-provisioner/csi-hostpathplugin-mdxjh" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080103 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c6867584-8090-43a9-bf55-f3fda830e5f0-proxy-tls\") pod \"machine-config-controller-84d6567774-sgr7v\" (UID: \"c6867584-8090-43a9-bf55-f3fda830e5f0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sgr7v" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080128 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/37486438-b8c6-4b20-911f-f6e66393f414-images\") pod \"machine-config-operator-74547568cd-qxmc5\" (UID: \"37486438-b8c6-4b20-911f-f6e66393f414\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qxmc5" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080148 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/daf920af-78e8-4fab-893e-f9a95d7315fe-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-fgbnd\" (UID: \"daf920af-78e8-4fab-893e-f9a95d7315fe\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fgbnd" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080174 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9409ba61-da1b-4587-8b49-c6f70b109e4d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-96c5q\" (UID: \"9409ba61-da1b-4587-8b49-c6f70b109e4d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-96c5q" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080207 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wp8v8\" (UniqueName: \"kubernetes.io/projected/2eb4ea57-e198-45b1-8f63-d95d4a223362-kube-api-access-wp8v8\") pod \"service-ca-operator-777779d784-g4684\" (UID: \"2eb4ea57-e198-45b1-8f63-d95d4a223362\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-g4684" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080231 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjvp6\" (UniqueName: \"kubernetes.io/projected/db12e396-cc65-44e6-89f4-e4458958f443-kube-api-access-cjvp6\") pod \"migrator-59844c95c7-6lshd\" (UID: \"db12e396-cc65-44e6-89f4-e4458958f443\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6lshd" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080271 4631 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-g49sm\" (UniqueName: \"kubernetes.io/projected/200bbd12-8133-46c5-a69f-ed2dd0b5e191-kube-api-access-g49sm\") pod \"csi-hostpathplugin-mdxjh\" (UID: \"200bbd12-8133-46c5-a69f-ed2dd0b5e191\") " pod="hostpath-provisioner/csi-hostpathplugin-mdxjh" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080293 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/446a34a0-fac2-477b-8186-db8ec5fbdf5c-tmpfs\") pod \"packageserver-d55dfcdfc-xhq8j\" (UID: \"446a34a0-fac2-477b-8186-db8ec5fbdf5c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhq8j" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080318 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1892d78f-ccb4-491b-8622-84340df88183-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-68slc\" (UID: \"1892d78f-ccb4-491b-8622-84340df88183\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-68slc" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080346 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7thzz\" (UniqueName: \"kubernetes.io/projected/0f92bdbc-4785-44bf-a91c-88fe53b02d2a-kube-api-access-7thzz\") pod \"control-plane-machine-set-operator-78cbb6b69f-gswpr\" (UID: \"0f92bdbc-4785-44bf-a91c-88fe53b02d2a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-gswpr" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080549 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbmq2\" (UniqueName: \"kubernetes.io/projected/c6867584-8090-43a9-bf55-f3fda830e5f0-kube-api-access-rbmq2\") pod \"machine-config-controller-84d6567774-sgr7v\" (UID: \"c6867584-8090-43a9-bf55-f3fda830e5f0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sgr7v" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080587 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz5jw\" (UniqueName: \"kubernetes.io/projected/6689754f-f13b-48f7-89a6-353ef119cd75-kube-api-access-tz5jw\") pod \"catalog-operator-68c6474976-vtdlj\" (UID: \"6689754f-f13b-48f7-89a6-353ef119cd75\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vtdlj" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080612 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/200bbd12-8133-46c5-a69f-ed2dd0b5e191-csi-data-dir\") pod \"csi-hostpathplugin-mdxjh\" (UID: \"200bbd12-8133-46c5-a69f-ed2dd0b5e191\") " pod="hostpath-provisioner/csi-hostpathplugin-mdxjh" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080634 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/496d955a-3328-4f38-86db-e5c382672b27-machine-approver-tls\") pod \"machine-approver-56656f9798-4wt5l\" (UID: \"496d955a-3328-4f38-86db-e5c382672b27\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4wt5l" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080659 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a-marketplace-trusted-ca\") pod 
\"marketplace-operator-79b997595-d97wj\" (UID: \"dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a\") " pod="openshift-marketplace/marketplace-operator-79b997595-d97wj" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080720 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6b035e50-4c55-4081-8ff2-3f720a8fa1b4-config-volume\") pod \"dns-default-h5fb2\" (UID: \"6b035e50-4c55-4081-8ff2-3f720a8fa1b4\") " pod="openshift-dns/dns-default-h5fb2" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080748 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7b362a1b-58aa-418c-be9b-b58655c4fe77-srv-cert\") pod \"olm-operator-6b444d44fb-j54mq\" (UID: \"7b362a1b-58aa-418c-be9b-b58655c4fe77\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j54mq" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080775 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/0f92bdbc-4785-44bf-a91c-88fe53b02d2a-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-gswpr\" (UID: \"0f92bdbc-4785-44bf-a91c-88fe53b02d2a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-gswpr" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080801 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6b035e50-4c55-4081-8ff2-3f720a8fa1b4-metrics-tls\") pod \"dns-default-h5fb2\" (UID: \"6b035e50-4c55-4081-8ff2-3f720a8fa1b4\") " pod="openshift-dns/dns-default-h5fb2" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080845 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhlqn\" (UniqueName: \"kubernetes.io/projected/4212359b-081e-4b11-8ca7-87cb9ff33a1c-kube-api-access-mhlqn\") pod \"collect-profiles-29414490-4s8gs\" (UID: \"4212359b-081e-4b11-8ca7-87cb9ff33a1c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080868 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zm52l\" (UniqueName: \"kubernetes.io/projected/0e506b06-8627-4340-b64b-13a003f52d0d-kube-api-access-zm52l\") pod \"service-ca-9c57cc56f-pqqkz\" (UID: \"0e506b06-8627-4340-b64b-13a003f52d0d\") " pod="openshift-service-ca/service-ca-9c57cc56f-pqqkz" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080906 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0e506b06-8627-4340-b64b-13a003f52d0d-signing-key\") pod \"service-ca-9c57cc56f-pqqkz\" (UID: \"0e506b06-8627-4340-b64b-13a003f52d0d\") " pod="openshift-service-ca/service-ca-9c57cc56f-pqqkz" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080931 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/14c0c843-4b1a-46bf-8e2b-5a233cc8d6d6-certs\") pod \"machine-config-server-vndv7\" (UID: \"14c0c843-4b1a-46bf-8e2b-5a233cc8d6d6\") " pod="openshift-machine-config-operator/machine-config-server-vndv7" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080966 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-9rq89\" (UniqueName: \"kubernetes.io/projected/1892d78f-ccb4-491b-8622-84340df88183-kube-api-access-9rq89\") pod \"multus-admission-controller-857f4d67dd-68slc\" (UID: \"1892d78f-ccb4-491b-8622-84340df88183\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-68slc" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.080993 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/446a34a0-fac2-477b-8186-db8ec5fbdf5c-webhook-cert\") pod \"packageserver-d55dfcdfc-xhq8j\" (UID: \"446a34a0-fac2-477b-8186-db8ec5fbdf5c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhq8j" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081024 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/6689754f-f13b-48f7-89a6-353ef119cd75-profile-collector-cert\") pod \"catalog-operator-68c6474976-vtdlj\" (UID: \"6689754f-f13b-48f7-89a6-353ef119cd75\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vtdlj" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081047 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7b362a1b-58aa-418c-be9b-b58655c4fe77-profile-collector-cert\") pod \"olm-operator-6b444d44fb-j54mq\" (UID: \"7b362a1b-58aa-418c-be9b-b58655c4fe77\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j54mq" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081074 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llffc\" (UniqueName: \"kubernetes.io/projected/6b035e50-4c55-4081-8ff2-3f720a8fa1b4-kube-api-access-llffc\") pod \"dns-default-h5fb2\" (UID: \"6b035e50-4c55-4081-8ff2-3f720a8fa1b4\") " pod="openshift-dns/dns-default-h5fb2" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081097 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9409ba61-da1b-4587-8b49-c6f70b109e4d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-96c5q\" (UID: \"9409ba61-da1b-4587-8b49-c6f70b109e4d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-96c5q" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081119 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/200bbd12-8133-46c5-a69f-ed2dd0b5e191-mountpoint-dir\") pod \"csi-hostpathplugin-mdxjh\" (UID: \"200bbd12-8133-46c5-a69f-ed2dd0b5e191\") " pod="hostpath-provisioner/csi-hostpathplugin-mdxjh" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081152 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wj7m\" (UniqueName: \"kubernetes.io/projected/9409ba61-da1b-4587-8b49-c6f70b109e4d-kube-api-access-4wj7m\") pod \"openshift-apiserver-operator-796bbdcf4f-96c5q\" (UID: \"9409ba61-da1b-4587-8b49-c6f70b109e4d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-96c5q" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081191 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/200bbd12-8133-46c5-a69f-ed2dd0b5e191-plugins-dir\") pod \"csi-hostpathplugin-mdxjh\" (UID: 
\"200bbd12-8133-46c5-a69f-ed2dd0b5e191\") " pod="hostpath-provisioner/csi-hostpathplugin-mdxjh" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081216 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4212359b-081e-4b11-8ca7-87cb9ff33a1c-config-volume\") pod \"collect-profiles-29414490-4s8gs\" (UID: \"4212359b-081e-4b11-8ca7-87cb9ff33a1c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081241 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f4473144-1622-451f-94f6-b2d878657bfd-cert\") pod \"ingress-canary-j7kc7\" (UID: \"f4473144-1622-451f-94f6-b2d878657bfd\") " pod="openshift-ingress-canary/ingress-canary-j7kc7" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081263 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnrc2\" (UniqueName: \"kubernetes.io/projected/dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a-kube-api-access-fnrc2\") pod \"marketplace-operator-79b997595-d97wj\" (UID: \"dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a\") " pod="openshift-marketplace/marketplace-operator-79b997595-d97wj" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081290 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksbd6\" (UniqueName: \"kubernetes.io/projected/496d955a-3328-4f38-86db-e5c382672b27-kube-api-access-ksbd6\") pod \"machine-approver-56656f9798-4wt5l\" (UID: \"496d955a-3328-4f38-86db-e5c382672b27\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4wt5l" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081320 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2eb4ea57-e198-45b1-8f63-d95d4a223362-config\") pod \"service-ca-operator-777779d784-g4684\" (UID: \"2eb4ea57-e198-45b1-8f63-d95d4a223362\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-g4684" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081363 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/14c0c843-4b1a-46bf-8e2b-5a233cc8d6d6-node-bootstrap-token\") pod \"machine-config-server-vndv7\" (UID: \"14c0c843-4b1a-46bf-8e2b-5a233cc8d6d6\") " pod="openshift-machine-config-operator/machine-config-server-vndv7" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081403 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26vhf\" (UniqueName: \"kubernetes.io/projected/7b362a1b-58aa-418c-be9b-b58655c4fe77-kube-api-access-26vhf\") pod \"olm-operator-6b444d44fb-j54mq\" (UID: \"7b362a1b-58aa-418c-be9b-b58655c4fe77\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j54mq" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081426 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/446a34a0-fac2-477b-8186-db8ec5fbdf5c-apiservice-cert\") pod \"packageserver-d55dfcdfc-xhq8j\" (UID: \"446a34a0-fac2-477b-8186-db8ec5fbdf5c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhq8j" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081451 4631 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/496d955a-3328-4f38-86db-e5c382672b27-auth-proxy-config\") pod \"machine-approver-56656f9798-4wt5l\" (UID: \"496d955a-3328-4f38-86db-e5c382672b27\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4wt5l" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081486 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4h27\" (UniqueName: \"kubernetes.io/projected/14c0c843-4b1a-46bf-8e2b-5a233cc8d6d6-kube-api-access-v4h27\") pod \"machine-config-server-vndv7\" (UID: \"14c0c843-4b1a-46bf-8e2b-5a233cc8d6d6\") " pod="openshift-machine-config-operator/machine-config-server-vndv7" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081516 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496d955a-3328-4f38-86db-e5c382672b27-config\") pod \"machine-approver-56656f9798-4wt5l\" (UID: \"496d955a-3328-4f38-86db-e5c382672b27\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4wt5l" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081540 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rw8x5\" (UniqueName: \"kubernetes.io/projected/37486438-b8c6-4b20-911f-f6e66393f414-kube-api-access-rw8x5\") pod \"machine-config-operator-74547568cd-qxmc5\" (UID: \"37486438-b8c6-4b20-911f-f6e66393f414\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qxmc5" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081565 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/f495761a-e318-4f2a-a41a-7baa61110fe7-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-kkczr\" (UID: \"f495761a-e318-4f2a-a41a-7baa61110fe7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kkczr" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081589 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2eb4ea57-e198-45b1-8f63-d95d4a223362-serving-cert\") pod \"service-ca-operator-777779d784-g4684\" (UID: \"2eb4ea57-e198-45b1-8f63-d95d4a223362\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-g4684" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081614 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-d97wj\" (UID: \"dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a\") " pod="openshift-marketplace/marketplace-operator-79b997595-d97wj" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081639 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c6867584-8090-43a9-bf55-f3fda830e5f0-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-sgr7v\" (UID: \"c6867584-8090-43a9-bf55-f3fda830e5f0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sgr7v" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081662 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: 
\"kubernetes.io/host-path/200bbd12-8133-46c5-a69f-ed2dd0b5e191-socket-dir\") pod \"csi-hostpathplugin-mdxjh\" (UID: \"200bbd12-8133-46c5-a69f-ed2dd0b5e191\") " pod="hostpath-provisioner/csi-hostpathplugin-mdxjh" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081687 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/37486438-b8c6-4b20-911f-f6e66393f414-proxy-tls\") pod \"machine-config-operator-74547568cd-qxmc5\" (UID: \"37486438-b8c6-4b20-911f-f6e66393f414\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qxmc5" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081713 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-548f4\" (UniqueName: \"kubernetes.io/projected/daf920af-78e8-4fab-893e-f9a95d7315fe-kube-api-access-548f4\") pod \"kube-storage-version-migrator-operator-b67b599dd-fgbnd\" (UID: \"daf920af-78e8-4fab-893e-f9a95d7315fe\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fgbnd" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.081752 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnfg2\" (UniqueName: \"kubernetes.io/projected/f4473144-1622-451f-94f6-b2d878657bfd-kube-api-access-dnfg2\") pod \"ingress-canary-j7kc7\" (UID: \"f4473144-1622-451f-94f6-b2d878657bfd\") " pod="openshift-ingress-canary/ingress-canary-j7kc7" Dec 04 17:30:39 crc kubenswrapper[4631]: E1204 17:30:39.082042 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:39.582021956 +0000 UTC m=+169.614263954 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.083924 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2eb4ea57-e198-45b1-8f63-d95d4a223362-config\") pod \"service-ca-operator-777779d784-g4684\" (UID: \"2eb4ea57-e198-45b1-8f63-d95d4a223362\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-g4684" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.084099 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/446a34a0-fac2-477b-8186-db8ec5fbdf5c-tmpfs\") pod \"packageserver-d55dfcdfc-xhq8j\" (UID: \"446a34a0-fac2-477b-8186-db8ec5fbdf5c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhq8j" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.084802 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/37486438-b8c6-4b20-911f-f6e66393f414-auth-proxy-config\") pod \"machine-config-operator-74547568cd-qxmc5\" (UID: \"37486438-b8c6-4b20-911f-f6e66393f414\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qxmc5" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.085406 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/37486438-b8c6-4b20-911f-f6e66393f414-images\") pod \"machine-config-operator-74547568cd-qxmc5\" (UID: \"37486438-b8c6-4b20-911f-f6e66393f414\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qxmc5" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.089830 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/14c0c843-4b1a-46bf-8e2b-5a233cc8d6d6-certs\") pod \"machine-config-server-vndv7\" (UID: \"14c0c843-4b1a-46bf-8e2b-5a233cc8d6d6\") " pod="openshift-machine-config-operator/machine-config-server-vndv7" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.093993 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0e506b06-8627-4340-b64b-13a003f52d0d-signing-key\") pod \"service-ca-9c57cc56f-pqqkz\" (UID: \"0e506b06-8627-4340-b64b-13a003f52d0d\") " pod="openshift-service-ca/service-ca-9c57cc56f-pqqkz" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.094776 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlsv7\" (UniqueName: \"kubernetes.io/projected/925f71fa-9882-47d0-9708-0c34ebb51df8-kube-api-access-vlsv7\") pod \"apiserver-7bbb656c7d-jn87j\" (UID: \"925f71fa-9882-47d0-9708-0c34ebb51df8\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.090776 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a-marketplace-trusted-ca\") pod 
\"marketplace-operator-79b997595-d97wj\" (UID: \"dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a\") " pod="openshift-marketplace/marketplace-operator-79b997595-d97wj" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.097464 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daf920af-78e8-4fab-893e-f9a95d7315fe-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-fgbnd\" (UID: \"daf920af-78e8-4fab-893e-f9a95d7315fe\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fgbnd" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.107129 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/200bbd12-8133-46c5-a69f-ed2dd0b5e191-registration-dir\") pod \"csi-hostpathplugin-mdxjh\" (UID: \"200bbd12-8133-46c5-a69f-ed2dd0b5e191\") " pod="hostpath-provisioner/csi-hostpathplugin-mdxjh" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.114167 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6b035e50-4c55-4081-8ff2-3f720a8fa1b4-config-volume\") pod \"dns-default-h5fb2\" (UID: \"6b035e50-4c55-4081-8ff2-3f720a8fa1b4\") " pod="openshift-dns/dns-default-h5fb2" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.115111 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0e506b06-8627-4340-b64b-13a003f52d0d-signing-cabundle\") pod \"service-ca-9c57cc56f-pqqkz\" (UID: \"0e506b06-8627-4340-b64b-13a003f52d0d\") " pod="openshift-service-ca/service-ca-9c57cc56f-pqqkz" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.124365 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/200bbd12-8133-46c5-a69f-ed2dd0b5e191-csi-data-dir\") pod \"csi-hostpathplugin-mdxjh\" (UID: \"200bbd12-8133-46c5-a69f-ed2dd0b5e191\") " pod="hostpath-provisioner/csi-hostpathplugin-mdxjh" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.124701 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6b035e50-4c55-4081-8ff2-3f720a8fa1b4-metrics-tls\") pod \"dns-default-h5fb2\" (UID: \"6b035e50-4c55-4081-8ff2-3f720a8fa1b4\") " pod="openshift-dns/dns-default-h5fb2" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.125257 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9q77\" (UniqueName: \"kubernetes.io/projected/87263d28-d75f-4701-9ac1-576084547adf-kube-api-access-c9q77\") pod \"dns-operator-744455d44c-swgqn\" (UID: \"87263d28-d75f-4701-9ac1-576084547adf\") " pod="openshift-dns-operator/dns-operator-744455d44c-swgqn" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.125905 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9409ba61-da1b-4587-8b49-c6f70b109e4d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-96c5q\" (UID: \"9409ba61-da1b-4587-8b49-c6f70b109e4d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-96c5q" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.126576 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: 
\"kubernetes.io/host-path/200bbd12-8133-46c5-a69f-ed2dd0b5e191-mountpoint-dir\") pod \"csi-hostpathplugin-mdxjh\" (UID: \"200bbd12-8133-46c5-a69f-ed2dd0b5e191\") " pod="hostpath-provisioner/csi-hostpathplugin-mdxjh" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.127011 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/200bbd12-8133-46c5-a69f-ed2dd0b5e191-plugins-dir\") pod \"csi-hostpathplugin-mdxjh\" (UID: \"200bbd12-8133-46c5-a69f-ed2dd0b5e191\") " pod="hostpath-provisioner/csi-hostpathplugin-mdxjh" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.127298 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496d955a-3328-4f38-86db-e5c382672b27-config\") pod \"machine-approver-56656f9798-4wt5l\" (UID: \"496d955a-3328-4f38-86db-e5c382672b27\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4wt5l" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.127459 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c6867584-8090-43a9-bf55-f3fda830e5f0-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-sgr7v\" (UID: \"c6867584-8090-43a9-bf55-f3fda830e5f0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sgr7v" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.127543 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/200bbd12-8133-46c5-a69f-ed2dd0b5e191-socket-dir\") pod \"csi-hostpathplugin-mdxjh\" (UID: \"200bbd12-8133-46c5-a69f-ed2dd0b5e191\") " pod="hostpath-provisioner/csi-hostpathplugin-mdxjh" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.128009 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/6689754f-f13b-48f7-89a6-353ef119cd75-srv-cert\") pod \"catalog-operator-68c6474976-vtdlj\" (UID: \"6689754f-f13b-48f7-89a6-353ef119cd75\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vtdlj" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.130217 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/446a34a0-fac2-477b-8186-db8ec5fbdf5c-webhook-cert\") pod \"packageserver-d55dfcdfc-xhq8j\" (UID: \"446a34a0-fac2-477b-8186-db8ec5fbdf5c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhq8j" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.128046 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4212359b-081e-4b11-8ca7-87cb9ff33a1c-config-volume\") pod \"collect-profiles-29414490-4s8gs\" (UID: \"4212359b-081e-4b11-8ca7-87cb9ff33a1c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.133921 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9409ba61-da1b-4587-8b49-c6f70b109e4d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-96c5q\" (UID: \"9409ba61-da1b-4587-8b49-c6f70b109e4d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-96c5q" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.135109 4631 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2eb4ea57-e198-45b1-8f63-d95d4a223362-serving-cert\") pod \"service-ca-operator-777779d784-g4684\" (UID: \"2eb4ea57-e198-45b1-8f63-d95d4a223362\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-g4684" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.135320 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/496d955a-3328-4f38-86db-e5c382672b27-auth-proxy-config\") pod \"machine-approver-56656f9798-4wt5l\" (UID: \"496d955a-3328-4f38-86db-e5c382672b27\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4wt5l" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.140036 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/496d955a-3328-4f38-86db-e5c382672b27-machine-approver-tls\") pod \"machine-approver-56656f9798-4wt5l\" (UID: \"496d955a-3328-4f38-86db-e5c382672b27\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4wt5l" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.140660 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/14c0c843-4b1a-46bf-8e2b-5a233cc8d6d6-node-bootstrap-token\") pod \"machine-config-server-vndv7\" (UID: \"14c0c843-4b1a-46bf-8e2b-5a233cc8d6d6\") " pod="openshift-machine-config-operator/machine-config-server-vndv7" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.141845 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/0f92bdbc-4785-44bf-a91c-88fe53b02d2a-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-gswpr\" (UID: \"0f92bdbc-4785-44bf-a91c-88fe53b02d2a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-gswpr" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.145077 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/daf920af-78e8-4fab-893e-f9a95d7315fe-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-fgbnd\" (UID: \"daf920af-78e8-4fab-893e-f9a95d7315fe\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fgbnd" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.147038 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4212359b-081e-4b11-8ca7-87cb9ff33a1c-secret-volume\") pod \"collect-profiles-29414490-4s8gs\" (UID: \"4212359b-081e-4b11-8ca7-87cb9ff33a1c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.151972 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/f495761a-e318-4f2a-a41a-7baa61110fe7-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-kkczr\" (UID: \"f495761a-e318-4f2a-a41a-7baa61110fe7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kkczr" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.152277 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" 
(UniqueName: \"kubernetes.io/secret/7b362a1b-58aa-418c-be9b-b58655c4fe77-srv-cert\") pod \"olm-operator-6b444d44fb-j54mq\" (UID: \"7b362a1b-58aa-418c-be9b-b58655c4fe77\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j54mq" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.153932 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/6689754f-f13b-48f7-89a6-353ef119cd75-profile-collector-cert\") pod \"catalog-operator-68c6474976-vtdlj\" (UID: \"6689754f-f13b-48f7-89a6-353ef119cd75\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vtdlj" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.153985 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7b362a1b-58aa-418c-be9b-b58655c4fe77-profile-collector-cert\") pod \"olm-operator-6b444d44fb-j54mq\" (UID: \"7b362a1b-58aa-418c-be9b-b58655c4fe77\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j54mq" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.154116 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/52770a8d-d215-4fa8-8469-95a315e44850-bound-sa-token\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.154271 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c6867584-8090-43a9-bf55-f3fda830e5f0-proxy-tls\") pod \"machine-config-controller-84d6567774-sgr7v\" (UID: \"c6867584-8090-43a9-bf55-f3fda830e5f0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sgr7v" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.155069 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/446a34a0-fac2-477b-8186-db8ec5fbdf5c-apiservice-cert\") pod \"packageserver-d55dfcdfc-xhq8j\" (UID: \"446a34a0-fac2-477b-8186-db8ec5fbdf5c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhq8j" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.157682 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/37486438-b8c6-4b20-911f-f6e66393f414-proxy-tls\") pod \"machine-config-operator-74547568cd-qxmc5\" (UID: \"37486438-b8c6-4b20-911f-f6e66393f414\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qxmc5" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.158615 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d253a0a7-4cc1-472f-b741-76da282b7672-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-cpw5s\" (UID: \"d253a0a7-4cc1-472f-b741-76da282b7672\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cpw5s" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.159213 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-d97wj\" (UID: \"dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-d97wj" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.159327 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1892d78f-ccb4-491b-8622-84340df88183-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-68slc\" (UID: \"1892d78f-ccb4-491b-8622-84340df88183\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-68slc" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.159873 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f4473144-1622-451f-94f6-b2d878657bfd-cert\") pod \"ingress-canary-j7kc7\" (UID: \"f4473144-1622-451f-94f6-b2d878657bfd\") " pod="openshift-ingress-canary/ingress-canary-j7kc7" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.181439 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-psg4c\" (UniqueName: \"kubernetes.io/projected/543fff0a-4d3c-4bfd-9239-32a0f80a4092-kube-api-access-psg4c\") pod \"cluster-image-registry-operator-dc59b4c8b-h4gb7\" (UID: \"543fff0a-4d3c-4bfd-9239-32a0f80a4092\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-h4gb7" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.186165 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:39 crc kubenswrapper[4631]: E1204 17:30:39.186662 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:39.686645185 +0000 UTC m=+169.718887183 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.199019 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/37b1b8a1-6ccc-4600-9430-32196ec1c9a4-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w2p6c\" (UID: \"37b1b8a1-6ccc-4600-9430-32196ec1c9a4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w2p6c" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.217985 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnfg2\" (UniqueName: \"kubernetes.io/projected/f4473144-1622-451f-94f6-b2d878657bfd-kube-api-access-dnfg2\") pod \"ingress-canary-j7kc7\" (UID: \"f4473144-1622-451f-94f6-b2d878657bfd\") " pod="openshift-ingress-canary/ingress-canary-j7kc7" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.235574 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-h4gb7" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.251615 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-j7kc7" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.252651 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfhjw\" (UniqueName: \"kubernetes.io/projected/446a34a0-fac2-477b-8186-db8ec5fbdf5c-kube-api-access-lfhjw\") pod \"packageserver-d55dfcdfc-xhq8j\" (UID: \"446a34a0-fac2-477b-8186-db8ec5fbdf5c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhq8j" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.253220 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.263414 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rq89\" (UniqueName: \"kubernetes.io/projected/1892d78f-ccb4-491b-8622-84340df88183-kube-api-access-9rq89\") pod \"multus-admission-controller-857f4d67dd-68slc\" (UID: \"1892d78f-ccb4-491b-8622-84340df88183\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-68slc" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.288978 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:39 crc kubenswrapper[4631]: E1204 17:30:39.289438 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:39.78942232 +0000 UTC m=+169.821664318 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.292210 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-z2968" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.294096 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzflr\" (UniqueName: \"kubernetes.io/projected/f495761a-e318-4f2a-a41a-7baa61110fe7-kube-api-access-vzflr\") pod \"package-server-manager-789f6589d5-kkczr\" (UID: \"f495761a-e318-4f2a-a41a-7baa61110fe7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kkczr" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.306675 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhlqn\" (UniqueName: \"kubernetes.io/projected/4212359b-081e-4b11-8ca7-87cb9ff33a1c-kube-api-access-mhlqn\") pod \"collect-profiles-29414490-4s8gs\" (UID: \"4212359b-081e-4b11-8ca7-87cb9ff33a1c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.314139 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zm52l\" (UniqueName: \"kubernetes.io/projected/0e506b06-8627-4340-b64b-13a003f52d0d-kube-api-access-zm52l\") pod \"service-ca-9c57cc56f-pqqkz\" (UID: \"0e506b06-8627-4340-b64b-13a003f52d0d\") " pod="openshift-service-ca/service-ca-9c57cc56f-pqqkz" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.328941 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w2p6c" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.333235 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26vhf\" (UniqueName: \"kubernetes.io/projected/7b362a1b-58aa-418c-be9b-b58655c4fe77-kube-api-access-26vhf\") pod \"olm-operator-6b444d44fb-j54mq\" (UID: \"7b362a1b-58aa-418c-be9b-b58655c4fe77\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j54mq" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.334737 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cpw5s" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.353814 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rsqp8"] Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.359772 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wj7m\" (UniqueName: \"kubernetes.io/projected/9409ba61-da1b-4587-8b49-c6f70b109e4d-kube-api-access-4wj7m\") pod \"openshift-apiserver-operator-796bbdcf4f-96c5q\" (UID: \"9409ba61-da1b-4587-8b49-c6f70b109e4d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-96c5q" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.375886 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-8kcrj"] Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.376212 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-swgqn" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.378259 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wp8v8\" (UniqueName: \"kubernetes.io/projected/2eb4ea57-e198-45b1-8f63-d95d4a223362-kube-api-access-wp8v8\") pod \"service-ca-operator-777779d784-g4684\" (UID: \"2eb4ea57-e198-45b1-8f63-d95d4a223362\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-g4684" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.381161 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-22m6s"] Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.383119 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-68slc" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.393536 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:39 crc kubenswrapper[4631]: E1204 17:30:39.394146 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:39.894129931 +0000 UTC m=+169.926371929 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.395244 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-96c5q" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.397610 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjvp6\" (UniqueName: \"kubernetes.io/projected/db12e396-cc65-44e6-89f4-e4458958f443-kube-api-access-cjvp6\") pod \"migrator-59844c95c7-6lshd\" (UID: \"db12e396-cc65-44e6-89f4-e4458958f443\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6lshd" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.399329 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5vvr2"] Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.411278 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kkczr" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.412905 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g49sm\" (UniqueName: \"kubernetes.io/projected/200bbd12-8133-46c5-a69f-ed2dd0b5e191-kube-api-access-g49sm\") pod \"csi-hostpathplugin-mdxjh\" (UID: \"200bbd12-8133-46c5-a69f-ed2dd0b5e191\") " pod="hostpath-provisioner/csi-hostpathplugin-mdxjh" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.443972 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6lshd" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.444412 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9qbmf"] Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.453111 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbmq2\" (UniqueName: \"kubernetes.io/projected/c6867584-8090-43a9-bf55-f3fda830e5f0-kube-api-access-rbmq2\") pod \"machine-config-controller-84d6567774-sgr7v\" (UID: \"c6867584-8090-43a9-bf55-f3fda830e5f0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sgr7v" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.465874 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-pqqkz" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.472048 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-br8kn"] Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.475467 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tz5jw\" (UniqueName: \"kubernetes.io/projected/6689754f-f13b-48f7-89a6-353ef119cd75-kube-api-access-tz5jw\") pod \"catalog-operator-68c6474976-vtdlj\" (UID: \"6689754f-f13b-48f7-89a6-353ef119cd75\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vtdlj" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.478829 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhq8j" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.485534 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr"] Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.488498 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.496776 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sgr7v" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.497856 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7thzz\" (UniqueName: \"kubernetes.io/projected/0f92bdbc-4785-44bf-a91c-88fe53b02d2a-kube-api-access-7thzz\") pod \"control-plane-machine-set-operator-78cbb6b69f-gswpr\" (UID: \"0f92bdbc-4785-44bf-a91c-88fe53b02d2a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-gswpr" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.501161 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rw8x5\" (UniqueName: \"kubernetes.io/projected/37486438-b8c6-4b20-911f-f6e66393f414-kube-api-access-rw8x5\") pod \"machine-config-operator-74547568cd-qxmc5\" (UID: \"37486438-b8c6-4b20-911f-f6e66393f414\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qxmc5" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.501227 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.501795 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-vgm6t"] Dec 04 17:30:39 crc kubenswrapper[4631]: E1204 17:30:39.502102 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:40.002079705 +0000 UTC m=+170.034321703 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.516925 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4h27\" (UniqueName: \"kubernetes.io/projected/14c0c843-4b1a-46bf-8e2b-5a233cc8d6d6-kube-api-access-v4h27\") pod \"machine-config-server-vndv7\" (UID: \"14c0c843-4b1a-46bf-8e2b-5a233cc8d6d6\") " pod="openshift-machine-config-operator/machine-config-server-vndv7" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.518662 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:39 crc kubenswrapper[4631]: E1204 17:30:39.519210 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-04 17:30:40.019195019 +0000 UTC m=+170.051437017 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.520559 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-g4684" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.522977 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j54mq" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.537958 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-vndv7" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.544876 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-hc4kn"] Dec 04 17:30:39 crc kubenswrapper[4631]: W1204 17:30:39.552713 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod808ad22e_4629_4a7d_8613_b35fedecc2ed.slice/crio-5f90a87a10d294af9e1ccf2877a644d246d881279f7242d74439d50fa7318f2f WatchSource:0}: Error finding container 5f90a87a10d294af9e1ccf2877a644d246d881279f7242d74439d50fa7318f2f: Status 404 returned error can't find the container with id 5f90a87a10d294af9e1ccf2877a644d246d881279f7242d74439d50fa7318f2f Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.561753 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-mdxjh" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.573610 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnrc2\" (UniqueName: \"kubernetes.io/projected/dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a-kube-api-access-fnrc2\") pod \"marketplace-operator-79b997595-d97wj\" (UID: \"dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a\") " pod="openshift-marketplace/marketplace-operator-79b997595-d97wj" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.574448 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llffc\" (UniqueName: \"kubernetes.io/projected/6b035e50-4c55-4081-8ff2-3f720a8fa1b4-kube-api-access-llffc\") pod \"dns-default-h5fb2\" (UID: \"6b035e50-4c55-4081-8ff2-3f720a8fa1b4\") " pod="openshift-dns/dns-default-h5fb2" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.593958 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksbd6\" (UniqueName: \"kubernetes.io/projected/496d955a-3328-4f38-86db-e5c382672b27-kube-api-access-ksbd6\") pod \"machine-approver-56656f9798-4wt5l\" (UID: \"496d955a-3328-4f38-86db-e5c382672b27\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4wt5l" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.620347 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:39 crc kubenswrapper[4631]: E1204 17:30:39.621131 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:40.121115849 +0000 UTC m=+170.153357837 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.673449 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" event={"ID":"e4ee6a0c-a43b-4cef-b54d-498f84fc947e","Type":"ContainerStarted","Data":"8e121c9be77c1c37c528dc16d7d10117f402c63b04607552efdde148f87f0e97"}
Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.674413 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-br8kn" event={"ID":"e59edbf8-41cb-4d41-b012-a5e2dcf83df4","Type":"ContainerStarted","Data":"8f4728c70dbef7297e8a37b5b7ad2eeb189a062b57928703e501dd0f0f0efc30"}
Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.675348 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-hgwwp" event={"ID":"f645aac4-2afb-45b9-8f28-dca8259c7278","Type":"ContainerStarted","Data":"f4fc0db7b5054c5dd5f1baf2fb88ea583cc81154d5d0ade8c05c7addfa92d0fa"}
Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.676090 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-g65qn" event={"ID":"832113b6-ef23-4863-8080-cea0494584e7","Type":"ContainerStarted","Data":"001a231a4bb3e645d315569890864dcca5d779dcdef822ab89d5f5479d40c4ed"}
Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.679068 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-kl45g" event={"ID":"81559ff3-95e9-455f-9d90-46c5f1a981ce","Type":"ContainerStarted","Data":"9a53639931dcaff4c0dda3b62d08387b8a9ba8f62219681043ee47b784d8b829"}
Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.679818 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-22m6s" event={"ID":"ff4cb9cc-d164-4771-8e5f-41acc28c25e6","Type":"ContainerStarted","Data":"69ebe8ba326318fbe579c7712f4d59fd2d7e7cd9f0830a41d43842d3b7674efe"}
Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.680445 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2" event={"ID":"85d00b11-ed99-44ac-81b8-73d958bc4d3e","Type":"ContainerStarted","Data":"c69423100875f5d43cc9ba2e43ed29f7919ce1026c8e326060835f9f0dda6e98"}
Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.681470 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t" event={"ID":"808ad22e-4629-4a7d-8613-b35fedecc2ed","Type":"ContainerStarted","Data":"5f90a87a10d294af9e1ccf2877a644d246d881279f7242d74439d50fa7318f2f"}
Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.682977 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-8kcrj" event={"ID":"86903bd1-674d-4fa2-b9d1-dbc8f347b72b","Type":"ContainerStarted","Data":"ce4c3caaa4dbc7d1a7d93314a9a93708aca89e2cf00729f4f77ad387871e56d8"}
Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.683969 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rsqp8" event={"ID":"c436effe-de7b-4e3b-a61a-0ff4a7067363","Type":"ContainerStarted","Data":"b6dbc2acaaf77c1dac594db60cf56314d73b010ce3c0faccae2c01e87e0d6649"}
Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.684720 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-glqcf" event={"ID":"db00623b-8c6a-45d1-ab8b-a7e4f81f64eb","Type":"ContainerStarted","Data":"3c34c02a580eb51c40fd270e4e93811362b8b397abb1e6703aa8ee942bf438b0"}
Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.685480 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr" event={"ID":"f6f09a1b-f9c2-43ec-8222-1fa25a379095","Type":"ContainerStarted","Data":"6194b319986ca79d53141e6b261f20da5c0a23c09056bb757d7051598c22c6cd"}
Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.720787 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-gswpr"
Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.722282 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:39 crc kubenswrapper[4631]: E1204 17:30:39.722906 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:40.222889715 +0000 UTC m=+170.255131713 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.735424 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qxmc5"
Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.753700 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4wt5l"
Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.770853 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vtdlj"
Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.813017 4631 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-d97wj" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.823386 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:39 crc kubenswrapper[4631]: E1204 17:30:39.823634 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:40.323618051 +0000 UTC m=+170.355860049 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.823747 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:39 crc kubenswrapper[4631]: E1204 17:30:39.824038 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:40.324030363 +0000 UTC m=+170.356272361 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.830404 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-h5fb2" Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.927298 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:39 crc kubenswrapper[4631]: E1204 17:30:39.927496 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-04 17:30:40.427471167 +0000 UTC m=+170.459713165 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:39 crc kubenswrapper[4631]: I1204 17:30:39.927532 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:39 crc kubenswrapper[4631]: E1204 17:30:39.928012 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:40.427999853 +0000 UTC m=+170.460241841 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.028901 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:40 crc kubenswrapper[4631]: E1204 17:30:40.029566 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:40.529544072 +0000 UTC m=+170.561786080 (durationBeforeRetry 500ms). 
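(The "Error:" detail for the attempt just logged continues below.) The cadence in these entries is the kubelet's volume reconciler re-requesting the same pending UnmountVolume/MountVolume work every ~100ms, while nestedpendingoperations gates each failed operation behind a "no retries permitted until" deadline, durationBeforeRetry being 500ms at this point in the log. A toy sketch of that gating, illustrative only and not kubelet code (the real backoff grows for an operation that keeps failing; here the loop simply succeeds once a fake driver "registers"):

```go
package main

import (
	"errors"
	"fmt"
	"sync/atomic"
	"time"
)

func main() {
	var registered atomic.Bool
	// Stand-in for csi-hostpathplugin coming up and registering ~1.2s later.
	go func() { time.Sleep(1200 * time.Millisecond); registered.Store(true) }()

	delay := 500 * time.Millisecond // matches durationBeforeRetry in the log
	const maxDelay = 2 * time.Second
	var retryAfter time.Time

	for {
		if time.Now().Before(retryAfter) {
			time.Sleep(100 * time.Millisecond) // reconciler keeps re-asking anyway
			continue
		}
		if registered.Load() {
			fmt.Println("MountDevice succeeded")
			return
		}
		err := errors.New("driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers")
		retryAfter = time.Now().Add(delay)
		fmt.Printf("failed: %v; no retries permitted until %s (durationBeforeRetry %s)\n",
			err, retryAfter.Format(time.RFC3339Nano), delay)
		if delay *= 2; delay > maxDelay { // assumed: exponential growth with a cap
			delay = maxDelay
		}
	}
}
```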
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.130469 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:40 crc kubenswrapper[4631]: E1204 17:30:40.131205 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:40.631178634 +0000 UTC m=+170.663420672 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.231177 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:40 crc kubenswrapper[4631]: E1204 17:30:40.231431 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:40.731401925 +0000 UTC m=+170.763643923 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.231461 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:40 crc kubenswrapper[4631]: E1204 17:30:40.231851 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:40.731842648 +0000 UTC m=+170.764084646 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:40 crc kubenswrapper[4631]: W1204 17:30:40.235132 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod30620276_02bb_4cea_a50e_36fc7d4689ae.slice/crio-96c767b9799f3d3f1710e6288a1669631a2afbc5c12b59c5bfdd79a11047fcfe WatchSource:0}: Error finding container 96c767b9799f3d3f1710e6288a1669631a2afbc5c12b59c5bfdd79a11047fcfe: Status 404 returned error can't find the container with id 96c767b9799f3d3f1710e6288a1669631a2afbc5c12b59c5bfdd79a11047fcfe Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.236633 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-548f4\" (UniqueName: \"kubernetes.io/projected/daf920af-78e8-4fab-893e-f9a95d7315fe-kube-api-access-548f4\") pod \"kube-storage-version-migrator-operator-b67b599dd-fgbnd\" (UID: \"daf920af-78e8-4fab-893e-f9a95d7315fe\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fgbnd" Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.302277 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fgbnd" Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.332393 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:40 crc kubenswrapper[4631]: E1204 17:30:40.332639 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:40.832597095 +0000 UTC m=+170.864839093 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.333314 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:40 crc kubenswrapper[4631]: E1204 17:30:40.333711 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:40.833693256 +0000 UTC m=+170.865935254 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.440021 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:40 crc kubenswrapper[4631]: E1204 17:30:40.440328 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:40.940286942 +0000 UTC m=+170.972528950 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.440507 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:40 crc kubenswrapper[4631]: E1204 17:30:40.440966 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:40.940948001 +0000 UTC m=+170.973190009 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.508882 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j"] Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.542142 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:40 crc kubenswrapper[4631]: E1204 17:30:40.542352 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:41.042311315 +0000 UTC m=+171.074553313 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.542673 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:40 crc kubenswrapper[4631]: E1204 17:30:40.543025 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:41.043012165 +0000 UTC m=+171.075254163 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.602172 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cpw5s"] Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.628618 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-68slc"] Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.643820 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:40 crc kubenswrapper[4631]: E1204 17:30:40.644332 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:41.144292247 +0000 UTC m=+171.176534245 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.695121 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" event={"ID":"30620276-02bb-4cea-a50e-36fc7d4689ae","Type":"ContainerStarted","Data":"96c767b9799f3d3f1710e6288a1669631a2afbc5c12b59c5bfdd79a11047fcfe"}
Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.699633 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-pdgsq" event={"ID":"9a38e196-88e0-4add-8e52-40b1d8eb79e9","Type":"ContainerStarted","Data":"a87bc8e8aa86a98420ba0cace0c5a81f30bf46acc68c136b27e5e14c1bd5715d"}
Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.717533 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-h4gb7"]
Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.746199 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:40 crc kubenswrapper[4631]: E1204 17:30:40.746583 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:41.246566748 +0000 UTC m=+171.278808746 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.847442 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:40 crc kubenswrapper[4631]: E1204 17:30:40.847609 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:41.347586942 +0000 UTC m=+171.379828950 (durationBeforeRetry 500ms).
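(The "Error:" detail continues below.) Two kinds of sync-loop input are interleaved here: "SyncLoop (PLEG)" entries come from the Pod Lifecycle Event Generator, which relists container state from the runtime, diffs it against its cache, and feeds ContainerStarted/ContainerDied events into the loop, while "SyncLoop UPDATE" source="api" entries are pod updates pushed from the API server. A minimal analogue of the PLEG-side plumbing, with illustrative names rather than kubelet's real types:

```go
package main

import "fmt"

// podLifecycleEvent mirrors the shape visible in the log's event={...} fields.
type podLifecycleEvent struct {
	ID   string // pod UID
	Type string // "ContainerStarted", "ContainerDied", ...
	Data string // container or sandbox ID
}

func main() {
	events := make(chan podLifecycleEvent, 2)
	// Two events copied from the entries above.
	events <- podLifecycleEvent{"30620276-02bb-4cea-a50e-36fc7d4689ae", "ContainerStarted",
		"96c767b9799f3d3f1710e6288a1669631a2afbc5c12b59c5bfdd79a11047fcfe"}
	events <- podLifecycleEvent{"9a38e196-88e0-4add-8e52-40b1d8eb79e9", "ContainerStarted",
		"a87bc8e8aa86a98420ba0cace0c5a81f30bf46acc68c136b27e5e14c1bd5715d"}
	close(events)

	// The sync loop consumes events and triggers a per-pod sync.
	for e := range events {
		fmt.Printf("SyncLoop (PLEG): event for pod %s: %s %s\n", e.ID, e.Type, e.Data)
	}
}
```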
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.847860 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:40 crc kubenswrapper[4631]: E1204 17:30:40.848248 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:41.348238091 +0000 UTC m=+171.380480089 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:40 crc kubenswrapper[4631]: W1204 17:30:40.856417 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod925f71fa_9882_47d0_9708_0c34ebb51df8.slice/crio-f652ecb016f90bfa523714e2eb0de096d0491e911ac668b6fa893f4ae08eeed1 WatchSource:0}: Error finding container f652ecb016f90bfa523714e2eb0de096d0491e911ac668b6fa893f4ae08eeed1: Status 404 returned error can't find the container with id f652ecb016f90bfa523714e2eb0de096d0491e911ac668b6fa893f4ae08eeed1 Dec 04 17:30:40 crc kubenswrapper[4631]: I1204 17:30:40.949849 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:40 crc kubenswrapper[4631]: E1204 17:30:40.951245 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:41.451223382 +0000 UTC m=+171.483465390 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.052288 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:41 crc kubenswrapper[4631]: E1204 17:30:41.052784 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:41.552769221 +0000 UTC m=+171.585011219 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.129602 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-z2968"] Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.153936 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:41 crc kubenswrapper[4631]: E1204 17:30:41.154271 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:41.654230619 +0000 UTC m=+171.686472617 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.154652 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:41 crc kubenswrapper[4631]: E1204 17:30:41.155132 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:41.655111414 +0000 UTC m=+171.687353412 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.256465 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:41 crc kubenswrapper[4631]: E1204 17:30:41.256918 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:41.75688578 +0000 UTC m=+171.789127838 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.292598 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-96c5q"] Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.358349 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:41 crc kubenswrapper[4631]: E1204 17:30:41.358778 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:41.858764269 +0000 UTC m=+171.891006267 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.381287 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-mdxjh"] Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.413941 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-j7kc7"] Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.459213 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:41 crc kubenswrapper[4631]: E1204 17:30:41.459535 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:41.959519856 +0000 UTC m=+171.991761854 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.513447 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs"] Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.516300 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j54mq"] Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.542044 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-qxmc5"] Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.560445 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:41 crc kubenswrapper[4631]: E1204 17:30:41.560776 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:42.060764507 +0000 UTC m=+172.093006505 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.614216 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-gswpr"] Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.662108 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:41 crc kubenswrapper[4631]: E1204 17:30:41.662623 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:42.162599715 +0000 UTC m=+172.194841713 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:41 crc kubenswrapper[4631]: W1204 17:30:41.747080 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4212359b_081e_4b11_8ca7_87cb9ff33a1c.slice/crio-73f8d8eb60dc9d5f74cb293a3e040663555483259a51a1c481ed5760768e399b WatchSource:0}: Error finding container 73f8d8eb60dc9d5f74cb293a3e040663555483259a51a1c481ed5760768e399b: Status 404 returned error can't find the container with id 73f8d8eb60dc9d5f74cb293a3e040663555483259a51a1c481ed5760768e399b
Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.747505 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mdxjh" event={"ID":"200bbd12-8133-46c5-a69f-ed2dd0b5e191","Type":"ContainerStarted","Data":"8605de396c37f178ec00e02931a3578b1984c7321159e1bcf6d92173fc8992d6"}
Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.750889 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-96c5q" event={"ID":"9409ba61-da1b-4587-8b49-c6f70b109e4d","Type":"ContainerStarted","Data":"ceb808e636a75e6efa58b1a769dc059e5648ce91519d272d80cfe9dee39df736"}
Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.754877 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-h4gb7" event={"ID":"543fff0a-4d3c-4bfd-9239-32a0f80a4092","Type":"ContainerStarted","Data":"bec67dc6d5bb9ec7a33f6f40fb5b33b7d37fd4c07cbb5e258fa5c118bacab6b2"}
Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.759473 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-vndv7" event={"ID":"14c0c843-4b1a-46bf-8e2b-5a233cc8d6d6","Type":"ContainerStarted","Data":"dedb35fcf5f904d0131d5285e5fce34350d96ddc7ac091dd2673457510148a5a"}
Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.761519 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-z2968" event={"ID":"3c27a704-3b03-4210-bbbf-7179dbfe39ff","Type":"ContainerStarted","Data":"ccb1f24e6b19839ca537545172c396fdbf118c3f4aaf9faeb30a6ddb4a74cd27"}
Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.765738 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:41 crc kubenswrapper[4631]: E1204 17:30:41.766306 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:42.266286556 +0000 UTC m=+172.298528554 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.774587 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j" event={"ID":"925f71fa-9882-47d0-9708-0c34ebb51df8","Type":"ContainerStarted","Data":"f652ecb016f90bfa523714e2eb0de096d0491e911ac668b6fa893f4ae08eeed1"}
Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.776156 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cpw5s" event={"ID":"d253a0a7-4cc1-472f-b741-76da282b7672","Type":"ContainerStarted","Data":"6b84b19e09c7eb3cd408ad1e37b1c89c961f8487be2154c03e86a5951681e3ff"}
Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.778183 4631 generic.go:334] "Generic (PLEG): container finished" podID="dbc61eb8-0a7c-4ea2-8f50-5a2522daa465" containerID="5d8eb8c639c946b1892ee37ea0011926046c43d8b9d363ff123aea7c30068d02" exitCode=0
Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.778240 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n" event={"ID":"dbc61eb8-0a7c-4ea2-8f50-5a2522daa465","Type":"ContainerDied","Data":"5d8eb8c639c946b1892ee37ea0011926046c43d8b9d363ff123aea7c30068d02"}
Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.779549 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-68slc" event={"ID":"1892d78f-ccb4-491b-8622-84340df88183","Type":"ContainerStarted","Data":"668bf663c6482d634442db3186a46ed2720679d41e8b0185b4012a3b6a93a0fa"}
Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.781543 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-j7kc7" event={"ID":"f4473144-1622-451f-94f6-b2d878657bfd","Type":"ContainerStarted","Data":"6f76089561c6bcc927c8d98e7360e7d3be348a2c9fcb356971883bf3e4c18a80"}
Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.869041 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:41 crc kubenswrapper[4631]: E1204 17:30:41.869612 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:42.369589786 +0000 UTC m=+172.401831784 (durationBeforeRetry 500ms).
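(The "Error:" detail continues below.) The manager.go:1169 "Failed to process watch event ... Status 404" warnings above come from cAdvisor: a cgroup watch event can fire for a just-created crio-... container before cAdvisor is tracking it, the lookup 404s, and the event is dropped; the container is then picked up by a later housekeeping pass, so during a mass pod startup like this one these warnings are usually benign. A toy illustration of that ordering race:

```go
package main

import "fmt"

func main() {
	tracked := map[string]bool{} // containers cAdvisor already knows about
	id := "73f8d8eb60dc9d5f74cb293a3e040663555483259a51a1c481ed5760768e399b"

	// Watch event arrives before the container is registered.
	if !tracked[id] {
		fmt.Printf("Failed to process watch event crio-%s: can't find the container with id %s\n", id, id)
	}

	// A later housekeeping pass discovers and registers the container,
	// so subsequent events for it process normally.
	tracked[id] = true
	if tracked[id] {
		fmt.Println("container now tracked:", id)
	}
}
```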
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.918464 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d97wj"] Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.973221 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:41 crc kubenswrapper[4631]: E1204 17:30:41.973736 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:42.47371936 +0000 UTC m=+172.505961358 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.974740 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhq8j"] Dec 04 17:30:41 crc kubenswrapper[4631]: I1204 17:30:41.979445 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-sgr7v"] Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.075105 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:42 crc kubenswrapper[4631]: E1204 17:30:42.075704 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:42.575678912 +0000 UTC m=+172.607920910 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:42 crc kubenswrapper[4631]: W1204 17:30:42.119902 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddcf8ab5e_a0db_4005_ad40_4bf2d5068b4a.slice/crio-00d2a0be36f2c74451f3de6d12ca5cad2e80bb18fb3eae8e6d7602bdd402c15c WatchSource:0}: Error finding container 00d2a0be36f2c74451f3de6d12ca5cad2e80bb18fb3eae8e6d7602bdd402c15c: Status 404 returned error can't find the container with id 00d2a0be36f2c74451f3de6d12ca5cad2e80bb18fb3eae8e6d7602bdd402c15c
Dec 04 17:30:42 crc kubenswrapper[4631]: W1204 17:30:42.170989 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod446a34a0_fac2_477b_8186_db8ec5fbdf5c.slice/crio-bb7668b3332f96c79640c6f6c5e32b20e41e243890a7807e1a26e155e4fc55a7 WatchSource:0}: Error finding container bb7668b3332f96c79640c6f6c5e32b20e41e243890a7807e1a26e155e4fc55a7: Status 404 returned error can't find the container with id bb7668b3332f96c79640c6f6c5e32b20e41e243890a7807e1a26e155e4fc55a7
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.176525 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:42 crc kubenswrapper[4631]: E1204 17:30:42.176876 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:42.676861501 +0000 UTC m=+172.709103499 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.255569 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vtdlj"]
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.257473 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-g4684"]
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.260519 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-swgqn"]
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.278709 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:42 crc kubenswrapper[4631]: E1204 17:30:42.279002 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:42.778967817 +0000 UTC m=+172.811209835 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.279438 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:42 crc kubenswrapper[4631]: E1204 17:30:42.279992 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:42.779979426 +0000 UTC m=+172.812221424 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:42 crc kubenswrapper[4631]: W1204 17:30:42.336126 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod87263d28_d75f_4701_9ac1_576084547adf.slice/crio-2e1f491223cce3c22fa72ac7517ac97c97599ba211609442d3beaae7e7e39cbd WatchSource:0}: Error finding container 2e1f491223cce3c22fa72ac7517ac97c97599ba211609442d3beaae7e7e39cbd: Status 404 returned error can't find the container with id 2e1f491223cce3c22fa72ac7517ac97c97599ba211609442d3beaae7e7e39cbd
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.370261 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kkczr"]
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.372409 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w2p6c"]
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.380494 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:42 crc kubenswrapper[4631]: E1204 17:30:42.380898 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:42.880880517 +0000 UTC m=+172.913122515 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.394110 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6lshd"]
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.420921 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fgbnd"]
Dec 04 17:30:42 crc kubenswrapper[4631]: W1204 17:30:42.452720 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37b1b8a1_6ccc_4600_9430_32196ec1c9a4.slice/crio-592d3ea1f83a9a8cc914aefbbf8dc6b02e276a9602e2475a3d0c1786942baaeb WatchSource:0}: Error finding container 592d3ea1f83a9a8cc914aefbbf8dc6b02e276a9602e2475a3d0c1786942baaeb: Status 404 returned error can't find the container with id 592d3ea1f83a9a8cc914aefbbf8dc6b02e276a9602e2475a3d0c1786942baaeb
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.481750 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:42 crc kubenswrapper[4631]: E1204 17:30:42.482070 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:42.982058856 +0000 UTC m=+173.014300854 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.522009 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-pqqkz"]
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.530723 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-h5fb2"]
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.582462 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:42 crc kubenswrapper[4631]: E1204 17:30:42.582622 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:43.082596916 +0000 UTC m=+173.114838914 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.582850 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:42 crc kubenswrapper[4631]: E1204 17:30:42.583272 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:43.083254865 +0000 UTC m=+173.115496863 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.685835 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:42 crc kubenswrapper[4631]: E1204 17:30:42.686299 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:43.186271286 +0000 UTC m=+173.218513284 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:42 crc kubenswrapper[4631]: W1204 17:30:42.753451 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0e506b06_8627_4340_b64b_13a003f52d0d.slice/crio-ac9fa57fbbf94715b95844c9da06facb501328c1dddd41a50a4ca78e6826d5cf WatchSource:0}: Error finding container ac9fa57fbbf94715b95844c9da06facb501328c1dddd41a50a4ca78e6826d5cf: Status 404 returned error can't find the container with id ac9fa57fbbf94715b95844c9da06facb501328c1dddd41a50a4ca78e6826d5cf
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.788450 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:42 crc kubenswrapper[4631]: E1204 17:30:42.788966 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:43.288944158 +0000 UTC m=+173.321186146 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.800207 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-h5fb2" event={"ID":"6b035e50-4c55-4081-8ff2-3f720a8fa1b4","Type":"ContainerStarted","Data":"2bd583cefece119017bcfe61c75d2d39f3d101f3914e4ec9df4651f48eaab231"}
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.808590 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-g4684" event={"ID":"2eb4ea57-e198-45b1-8f63-d95d4a223362","Type":"ContainerStarted","Data":"bb873e94cb906100d5f6271f7f1004588e6d3f215a2118d53803d4931e1b296f"}
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.810539 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fgbnd" event={"ID":"daf920af-78e8-4fab-893e-f9a95d7315fe","Type":"ContainerStarted","Data":"c6e59dea9cdf4f284f6a3aac59be445a4feadfe542f5e248161d1f1bb08325b7"}
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.813104 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6lshd" event={"ID":"db12e396-cc65-44e6-89f4-e4458958f443","Type":"ContainerStarted","Data":"fd64f729100a400d8e1b26d83293ca9361df2ef58d2d2ad9fa9ca73eb9c4982f"}
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.816412 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-pqqkz" event={"ID":"0e506b06-8627-4340-b64b-13a003f52d0d","Type":"ContainerStarted","Data":"ac9fa57fbbf94715b95844c9da06facb501328c1dddd41a50a4ca78e6826d5cf"}
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.823649 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j54mq" event={"ID":"7b362a1b-58aa-418c-be9b-b58655c4fe77","Type":"ContainerStarted","Data":"e2815979c20058abaa3bcfb1e187f4ec758fce68516ee33901edfe87e32c3227"}
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.833161 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-gswpr" event={"ID":"0f92bdbc-4785-44bf-a91c-88fe53b02d2a","Type":"ContainerStarted","Data":"d09bb2fc11b0039dc53e25fd64b9f34a9552b48bcf39cefe33be36c7a507cb1f"}
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.834785 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhq8j" event={"ID":"446a34a0-fac2-477b-8186-db8ec5fbdf5c","Type":"ContainerStarted","Data":"bb7668b3332f96c79640c6f6c5e32b20e41e243890a7807e1a26e155e4fc55a7"}
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.835806 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w2p6c" event={"ID":"37b1b8a1-6ccc-4600-9430-32196ec1c9a4","Type":"ContainerStarted","Data":"592d3ea1f83a9a8cc914aefbbf8dc6b02e276a9602e2475a3d0c1786942baaeb"}
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.836546 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs" event={"ID":"4212359b-081e-4b11-8ca7-87cb9ff33a1c","Type":"ContainerStarted","Data":"73f8d8eb60dc9d5f74cb293a3e040663555483259a51a1c481ed5760768e399b"}
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.837511 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sgr7v" event={"ID":"c6867584-8090-43a9-bf55-f3fda830e5f0","Type":"ContainerStarted","Data":"eb8c01411919fde796186528ffa50f39d8e71b2508913fc6e9016a03c4b9f8fd"}
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.838239 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kkczr" event={"ID":"f495761a-e318-4f2a-a41a-7baa61110fe7","Type":"ContainerStarted","Data":"b5f5de09e2f137e556f8083ff2d6474d4b9fb176432af1d0823caa199849eb18"}
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.839519 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4wt5l" event={"ID":"496d955a-3328-4f38-86db-e5c382672b27","Type":"ContainerStarted","Data":"9dd7041c2a9fc9669f31f6378240842ee3f6a33bfb38f6a4268331e8195ab271"}
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.840724 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vtdlj" event={"ID":"6689754f-f13b-48f7-89a6-353ef119cd75","Type":"ContainerStarted","Data":"f508ac87813c48be4ae1d9642e47ee091a323eb41cbf967162944f161b196a3f"}
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.841796 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-d97wj" event={"ID":"dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a","Type":"ContainerStarted","Data":"00d2a0be36f2c74451f3de6d12ca5cad2e80bb18fb3eae8e6d7602bdd402c15c"}
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.843250 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-swgqn" event={"ID":"87263d28-d75f-4701-9ac1-576084547adf","Type":"ContainerStarted","Data":"2e1f491223cce3c22fa72ac7517ac97c97599ba211609442d3beaae7e7e39cbd"}
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.845898 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qxmc5" event={"ID":"37486438-b8c6-4b20-911f-f6e66393f414","Type":"ContainerStarted","Data":"902580abb5365a62b64475e6ce2d95ff50e4ba8e8d08679b6c82b77820c03898"}
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.889704 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:42 crc kubenswrapper[4631]: E1204 17:30:42.890180 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:43.390070526 +0000 UTC m=+173.422312524 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.890443 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:42 crc kubenswrapper[4631]: E1204 17:30:42.890903 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:43.390881929 +0000 UTC m=+173.423123917 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.993112 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:42 crc kubenswrapper[4631]: E1204 17:30:42.993351 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:43.493315814 +0000 UTC m=+173.525557802 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:42 crc kubenswrapper[4631]: I1204 17:30:42.993434 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:42 crc kubenswrapper[4631]: E1204 17:30:42.993800 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:43.493783588 +0000 UTC m=+173.526025586 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:43 crc kubenswrapper[4631]: I1204 17:30:43.095305 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:43 crc kubenswrapper[4631]: E1204 17:30:43.095721 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:43.595700828 +0000 UTC m=+173.627942816 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:43 crc kubenswrapper[4631]: I1204 17:30:43.197483 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:43 crc kubenswrapper[4631]: E1204 17:30:43.197810 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:43.697793504 +0000 UTC m=+173.730035502 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:43 crc kubenswrapper[4631]: I1204 17:30:43.302287 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:43 crc kubenswrapper[4631]: E1204 17:30:43.302526 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:43.802499924 +0000 UTC m=+173.834741922 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:43 crc kubenswrapper[4631]: I1204 17:30:43.303015 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:43 crc kubenswrapper[4631]: E1204 17:30:43.303482 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:43.803462832 +0000 UTC m=+173.835704830 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:43 crc kubenswrapper[4631]: I1204 17:30:43.404206 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:43 crc kubenswrapper[4631]: E1204 17:30:43.404750 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:43.904728973 +0000 UTC m=+173.936970971 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:43 crc kubenswrapper[4631]: I1204 17:30:43.506204 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:43 crc kubenswrapper[4631]: E1204 17:30:43.506769 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:44.006743416 +0000 UTC m=+174.038985414 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:43 crc kubenswrapper[4631]: I1204 17:30:43.607144 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:43 crc kubenswrapper[4631]: E1204 17:30:43.607397 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:44.107337178 +0000 UTC m=+174.139579176 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:43 crc kubenswrapper[4631]: I1204 17:30:43.607507 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:43 crc kubenswrapper[4631]: E1204 17:30:43.607974 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:44.107965996 +0000 UTC m=+174.140207994 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:43 crc kubenswrapper[4631]: I1204 17:30:43.708744 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:43 crc kubenswrapper[4631]: E1204 17:30:43.708862 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:44.208844157 +0000 UTC m=+174.241086155 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:43 crc kubenswrapper[4631]: I1204 17:30:43.709011 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:43 crc kubenswrapper[4631]: E1204 17:30:43.709299 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:44.20929222 +0000 UTC m=+174.241534218 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:43 crc kubenswrapper[4631]: I1204 17:30:43.810190 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:43 crc kubenswrapper[4631]: E1204 17:30:43.810362 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:44.310341305 +0000 UTC m=+174.342583313 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:43 crc kubenswrapper[4631]: I1204 17:30:43.810538 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:43 crc kubenswrapper[4631]: E1204 17:30:43.810825 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:44.310815039 +0000 UTC m=+174.343057047 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:43 crc kubenswrapper[4631]: I1204 17:30:43.853088 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-22m6s" event={"ID":"ff4cb9cc-d164-4771-8e5f-41acc28c25e6","Type":"ContainerStarted","Data":"df3f6ec34bbd85a7853e612de01dc2c8d4570527e15c892b0d505ece6ca398d7"}
Dec 04 17:30:43 crc kubenswrapper[4631]: I1204 17:30:43.855186 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-kl45g" event={"ID":"81559ff3-95e9-455f-9d90-46c5f1a981ce","Type":"ContainerStarted","Data":"e0d1d532fa5a56c3f192ff264dd0efe882bcba977f2c11b0a5b512b51a486a9f"}
Dec 04 17:30:43 crc kubenswrapper[4631]: I1204 17:30:43.856832 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" event={"ID":"e4ee6a0c-a43b-4cef-b54d-498f84fc947e","Type":"ContainerStarted","Data":"c01491010c9c6088ef01de3ea367059c5c79d7f0d4a0e3fe49ffaebd0a7b1b9b"}
Dec 04 17:30:43 crc kubenswrapper[4631]: I1204 17:30:43.859118 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-hgwwp" event={"ID":"f645aac4-2afb-45b9-8f28-dca8259c7278","Type":"ContainerStarted","Data":"ee2cb9947804b4d67ba33e340b81c28fa93cd10ed641572a8f74cbb453698911"}
Dec 04 17:30:43 crc kubenswrapper[4631]: I1204 17:30:43.860616 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-g65qn" event={"ID":"832113b6-ef23-4863-8080-cea0494584e7","Type":"ContainerStarted","Data":"4abadcbb1ef3ad822405c3f61d3749e911767dc352d83dbc600021bd5825ae5f"}
Dec 04 17:30:43 crc kubenswrapper[4631]: I1204 17:30:43.861853 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-glqcf" event={"ID":"db00623b-8c6a-45d1-ab8b-a7e4f81f64eb","Type":"ContainerStarted","Data":"600b84eaaf010ee2fe89941b54bc77466c9db31d1a46ef7b9f2a004378e8f8bd"}
Dec 04 17:30:43 crc kubenswrapper[4631]: I1204 17:30:43.862746 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9qbmf" event={"ID":"4ef93ccc-0327-4a63-8f44-4cb3f9e1ba83","Type":"ContainerStarted","Data":"5d0ac372a47065a5ac9f1b5f0e3196a9e9cfd55029d9edc181c0bfad5d780e93"}
Dec 04 17:30:43 crc kubenswrapper[4631]: I1204 17:30:43.912143 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:43 crc kubenswrapper[4631]: E1204 17:30:43.912245 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:44.412229344 +0000 UTC m=+174.444471342 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:43 crc kubenswrapper[4631]: I1204 17:30:43.912414 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:43 crc kubenswrapper[4631]: E1204 17:30:43.914474 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:44.414455589 +0000 UTC m=+174.446697657 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.014754 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:44 crc kubenswrapper[4631]: E1204 17:30:44.015268 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:44.515237036 +0000 UTC m=+174.547479034 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.015726 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:44 crc kubenswrapper[4631]: E1204 17:30:44.016251 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:44.516230955 +0000 UTC m=+174.548472963 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.117290 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:44 crc kubenswrapper[4631]: E1204 17:30:44.117670 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:44.617655491 +0000 UTC m=+174.649897489 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.218906 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:44 crc kubenswrapper[4631]: E1204 17:30:44.219273 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:44.719262332 +0000 UTC m=+174.751504330 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.320585 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:44 crc kubenswrapper[4631]: E1204 17:30:44.320819 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:44.820789031 +0000 UTC m=+174.853031039 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.321750 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:44 crc kubenswrapper[4631]: E1204 17:30:44.322116 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:44.822100659 +0000 UTC m=+174.854342657 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.422814 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:44 crc kubenswrapper[4631]: E1204 17:30:44.423277 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:44.923259307 +0000 UTC m=+174.955501305 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.524744 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:44 crc kubenswrapper[4631]: E1204 17:30:44.525323 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:45.025295211 +0000 UTC m=+175.057537379 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.626557 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:44 crc kubenswrapper[4631]: E1204 17:30:44.626802 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:45.126767378 +0000 UTC m=+175.159009376 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.627022 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:44 crc kubenswrapper[4631]: E1204 17:30:44.627467 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:45.127455098 +0000 UTC m=+175.159697096 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.743645 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:44 crc kubenswrapper[4631]: E1204 17:30:44.743903 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:45.243866327 +0000 UTC m=+175.276108325 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.744080 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:44 crc kubenswrapper[4631]: E1204 17:30:44.744548 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:45.244534726 +0000 UTC m=+175.276776924 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.845527 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:44 crc kubenswrapper[4631]: E1204 17:30:44.845758 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:45.345727125 +0000 UTC m=+175.377969133 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.855155 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:44 crc kubenswrapper[4631]: E1204 17:30:44.855626 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:45.3556053 +0000 UTC m=+175.387847478 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.868973 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" event={"ID":"30620276-02bb-4cea-a50e-36fc7d4689ae","Type":"ContainerStarted","Data":"0ab2ab5778b6f962c77c239a46aaedbf16c5d836fe5021987e81e07be37177f4"} Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.870532 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr" event={"ID":"f6f09a1b-f9c2-43ec-8222-1fa25a379095","Type":"ContainerStarted","Data":"e124d0d4346b689a96e88cae51fae98d342905979533b5dadd243e8a27933019"} Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.871585 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-br8kn" event={"ID":"e59edbf8-41cb-4d41-b012-a5e2dcf83df4","Type":"ContainerStarted","Data":"631af24e2e20868e4ada17b5c1d1ecb23ae26a69d297e4423f8d936161102eb6"} Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.873039 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-h4gb7" event={"ID":"543fff0a-4d3c-4bfd-9239-32a0f80a4092","Type":"ContainerStarted","Data":"fbf1d1f489df9963d7b6f6836c30d4a2e64aa8206e8625da9fa7479b99b8b186"} Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.874133 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rsqp8" event={"ID":"c436effe-de7b-4e3b-a61a-0ff4a7067363","Type":"ContainerStarted","Data":"c204284ebb055c564ad8449fb979ab8e326d9c4b009dbe327d873adc33f44000"} Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.875104 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t" event={"ID":"808ad22e-4629-4a7d-8613-b35fedecc2ed","Type":"ContainerStarted","Data":"dbc8352df7a7a5c1d3882ccc136e9a60b582a784ce1417a33f2692708af818e3"} Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.876148 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-8kcrj" event={"ID":"86903bd1-674d-4fa2-b9d1-dbc8f347b72b","Type":"ContainerStarted","Data":"ddf0012440a8e2925d773c10d6d1e09fa337c9f807f45e7bd7fe85ecbee500bf"} Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.877134 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-j7kc7" event={"ID":"f4473144-1622-451f-94f6-b2d878657bfd","Type":"ContainerStarted","Data":"97fa704a14139fe52cf8c215dab13902a29b021c5edcd6ba3b815084e8d8a7e9"} Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.878114 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-pqqkz" event={"ID":"0e506b06-8627-4340-b64b-13a003f52d0d","Type":"ContainerStarted","Data":"ba23d02ae077935c9805cf16318ca143ae52e46f64861c555e1df50d0c301996"} Dec 04 17:30:44 crc 
kubenswrapper[4631]: I1204 17:30:44.883056 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2" event={"ID":"85d00b11-ed99-44ac-81b8-73d958bc4d3e","Type":"ContainerStarted","Data":"8c86a2ea23af5a07be57b351f6333efb7ed8252c7f01538d8428e7e9150d3697"} Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.956336 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:44 crc kubenswrapper[4631]: E1204 17:30:44.956725 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:45.456698617 +0000 UTC m=+175.488940605 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:44 crc kubenswrapper[4631]: I1204 17:30:44.956822 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:44 crc kubenswrapper[4631]: E1204 17:30:44.957435 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:45.457424208 +0000 UTC m=+175.489666206 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.061607 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:45 crc kubenswrapper[4631]: E1204 17:30:45.062016 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
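[editor's note] Interleaved with the volume retries, the "SyncLoop (PLEG)" entries show the pod lifecycle event generator handing container state changes to the sync loop as (ID, Type, Data) tuples; the apiserver pod's container 0ab2ab57... is reported ContainerStarted here and ContainerDied with exitCode=0 shortly after. A small sketch of that event flow, with a hypothetical podLifecycleEvent type mirroring the fields logged above:

```go
package main

import "fmt"

// podLifecycleEvent mirrors the event={"ID":...,"Type":...,"Data":...}
// structure in the log: pod UID, state change, and container ID.
type podLifecycleEvent struct {
	ID   string // pod UID
	Type string // e.g. "ContainerStarted", "ContainerDied"
	Data string // container (or sandbox) ID
}

func main() {
	events := make(chan podLifecycleEvent, 2)
	// The relisting side notices container state changes and queues events...
	events <- podLifecycleEvent{
		ID:   "30620276-02bb-4cea-a50e-36fc7d4689ae",
		Type: "ContainerStarted",
		Data: "0ab2ab5778b6f962c77c239a46aaedbf16c5d836fe5021987e81e07be37177f4",
	}
	events <- podLifecycleEvent{
		ID:   "30620276-02bb-4cea-a50e-36fc7d4689ae",
		Type: "ContainerDied",
		Data: "0ab2ab5778b6f962c77c239a46aaedbf16c5d836fe5021987e81e07be37177f4",
	}
	close(events)
	// ...and the sync loop consumes them, triggering a pod sync per event.
	for ev := range events {
		fmt.Printf("SyncLoop (PLEG): event for pod ID=%s type=%s data=%s\n",
			ev.ID, ev.Type, ev.Data)
	}
}
```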
No retries permitted until 2025-12-04 17:30:45.561998965 +0000 UTC m=+175.594240963 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.163134 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:45 crc kubenswrapper[4631]: E1204 17:30:45.163638 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:45.663615106 +0000 UTC m=+175.695857284 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.264780 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:45 crc kubenswrapper[4631]: E1204 17:30:45.265010 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:45.764988491 +0000 UTC m=+175.797230479 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.265491 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:45 crc kubenswrapper[4631]: E1204 17:30:45.265947 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:45.765938948 +0000 UTC m=+175.798180936 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.366869 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:45 crc kubenswrapper[4631]: E1204 17:30:45.367316 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:45.867296612 +0000 UTC m=+175.899538610 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.468602 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:45 crc kubenswrapper[4631]: E1204 17:30:45.468974 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:45.968961725 +0000 UTC m=+176.001203723 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.570122 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:45 crc kubenswrapper[4631]: E1204 17:30:45.570555 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:46.070540216 +0000 UTC m=+176.102782214 (durationBeforeRetry 500ms). 
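[editor's note] Every failed attempt ends with "No retries permitted until <timestamp> (durationBeforeRetry 500ms)": the pending-operations table refuses to requeue the same volume operation until a backoff expires, even though the reconciler polls much more often (roughly every 100ms in this log). Below is a behavioral sketch of that gating under the assumption of a flat 500ms delay, which is all this excerpt shows; opGate is a made-up name, not the kubelet's nestedpendingoperations implementation.

```go
package main

import (
	"fmt"
	"time"
)

// opGate refuses to retry a named operation until its backoff expires,
// mirroring the "No retries permitted until ..." lines above.
type opGate struct {
	retryAfter map[string]time.Time
	backoff    time.Duration
}

func newOpGate(backoff time.Duration) *opGate {
	return &opGate{retryAfter: map[string]time.Time{}, backoff: backoff}
}

// run executes op unless the previous failure's backoff is still active.
func (g *opGate) run(name string, op func() error) error {
	if until, ok := g.retryAfter[name]; ok && time.Now().Before(until) {
		return fmt.Errorf("operation %q failed. No retries permitted until %s (durationBeforeRetry %s)",
			name, until.Format(time.RFC3339Nano), g.backoff)
	}
	if err := op(); err != nil {
		g.retryAfter[name] = time.Now().Add(g.backoff) // arm the backoff
		return err
	}
	delete(g.retryAfter, name)
	return nil
}

func main() {
	gate := newOpGate(500 * time.Millisecond)
	mount := func() error { return fmt.Errorf("driver not registered") }

	fmt.Println(gate.run("volume pvc-657094db", mount)) // failure arms the backoff
	fmt.Println(gate.run("volume pvc-657094db", mount)) // immediate retry is refused
	time.Sleep(600 * time.Millisecond)
	fmt.Println(gate.run("volume pvc-657094db", mount)) // after 500ms the op runs (and fails) again
}
```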
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.671618 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:45 crc kubenswrapper[4631]: E1204 17:30:45.672243 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:46.172224249 +0000 UTC m=+176.204466257 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.772956 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:45 crc kubenswrapper[4631]: E1204 17:30:45.773219 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:46.273149651 +0000 UTC m=+176.305391649 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.874738 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:45 crc kubenswrapper[4631]: E1204 17:30:45.875221 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:46.375203985 +0000 UTC m=+176.407445983 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.892497 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-96c5q" event={"ID":"9409ba61-da1b-4587-8b49-c6f70b109e4d","Type":"ContainerStarted","Data":"d283bffa7ae620f00671187ebc01ce3f57fecf53700cc9489bf975e935e64838"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.894576 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-vndv7" event={"ID":"14c0c843-4b1a-46bf-8e2b-5a233cc8d6d6","Type":"ContainerStarted","Data":"de2e64308cb29789faa57c1ec963a4374a226e9b8dea5b1d54d1d6c7f27528f9"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.900134 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6lshd" event={"ID":"db12e396-cc65-44e6-89f4-e4458958f443","Type":"ContainerStarted","Data":"75ab848aec16c839e0fda29490aaa54bc19ed9ad8149234632246d6a7caa50eb"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.903299 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n" event={"ID":"dbc61eb8-0a7c-4ea2-8f50-5a2522daa465","Type":"ContainerStarted","Data":"ec28c3348459b1158f87baea0887c61951ce285ecef1f8c93f00c2dc844ad4d5"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.905651 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-pdgsq" event={"ID":"9a38e196-88e0-4add-8e52-40b1d8eb79e9","Type":"ContainerStarted","Data":"718fcf72dbf8dc0af563cc279e333891bd54a263f6fde67adc05109a44dc0767"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.908015 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-gswpr" event={"ID":"0f92bdbc-4785-44bf-a91c-88fe53b02d2a","Type":"ContainerStarted","Data":"b89abc7b2eb149601653abc7de6d5b7fc6d3bf0e598b63b26169ae10ea70e448"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.910264 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w2p6c" event={"ID":"37b1b8a1-6ccc-4600-9430-32196ec1c9a4","Type":"ContainerStarted","Data":"a6e8793ac8cf3694d03229600ae2ba3ac41799dcd00f16e104dadf87fd030906"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.912445 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kkczr" event={"ID":"f495761a-e318-4f2a-a41a-7baa61110fe7","Type":"ContainerStarted","Data":"7cb47f61b5b25c215c90800eb2d96066c8cea91b92d38409ecb6d658e07b5ae2"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.914319 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs" event={"ID":"4212359b-081e-4b11-8ca7-87cb9ff33a1c","Type":"ContainerStarted","Data":"72e4cb61ae27c39b1fe5d633cd183b6c90f27c95b6163743193fa3f2cf45de48"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.916162 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-h5fb2" event={"ID":"6b035e50-4c55-4081-8ff2-3f720a8fa1b4","Type":"ContainerStarted","Data":"d7633b91db319acbfc2327a78d44d0053abfa0972b12904987af6dc0c3206175"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.918651 4631 generic.go:334] "Generic (PLEG): container finished" podID="30620276-02bb-4cea-a50e-36fc7d4689ae" containerID="0ab2ab5778b6f962c77c239a46aaedbf16c5d836fe5021987e81e07be37177f4" exitCode=0 Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.918741 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" event={"ID":"30620276-02bb-4cea-a50e-36fc7d4689ae","Type":"ContainerDied","Data":"0ab2ab5778b6f962c77c239a46aaedbf16c5d836fe5021987e81e07be37177f4"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.922163 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-swgqn" event={"ID":"87263d28-d75f-4701-9ac1-576084547adf","Type":"ContainerStarted","Data":"6d04ed612b4a24621d5f390a8a10de8a1a741c4704fd22fd1fcfd3652c9ce4d8"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.923909 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9qbmf" event={"ID":"4ef93ccc-0327-4a63-8f44-4cb3f9e1ba83","Type":"ContainerStarted","Data":"f07f905b4e2754d4d2a52d776e893fe6c3f7fa85eecc801fe2e01ee1013a6fcc"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.926785 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-z2968" event={"ID":"3c27a704-3b03-4210-bbbf-7179dbfe39ff","Type":"ContainerStarted","Data":"d7ddef95d1f1927ab88132005df3aff4d895f4f6bb4b67a9dee7ac1c5628bda0"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.928801 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sgr7v" 
event={"ID":"c6867584-8090-43a9-bf55-f3fda830e5f0","Type":"ContainerStarted","Data":"5b6b3d8f2a8e2fff48c01b0ecfdf92409311204bb8b4c0356324f1980a798ad0"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.930742 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fgbnd" event={"ID":"daf920af-78e8-4fab-893e-f9a95d7315fe","Type":"ContainerStarted","Data":"475c44f033ebb0114b3cef4f19bb1b824c3c5192bf519c2f7d655d0faccacb6d"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.932387 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-d97wj" event={"ID":"dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a","Type":"ContainerStarted","Data":"440fd9893a7c425a59523910df10ddb453c974398e8d56bfee3bbd2be62fa9f1"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.934409 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-68slc" event={"ID":"1892d78f-ccb4-491b-8622-84340df88183","Type":"ContainerStarted","Data":"5936fec65da87b0c8936cc68874ec0ab4452edc679542c1ffcec9457f86a7b7b"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.936557 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhq8j" event={"ID":"446a34a0-fac2-477b-8186-db8ec5fbdf5c","Type":"ContainerStarted","Data":"4648b238349e7f9cb53ff99a9452bcc7a1f35587a039d6c75d97846c04dcb588"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.941084 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j54mq" event={"ID":"7b362a1b-58aa-418c-be9b-b58655c4fe77","Type":"ContainerStarted","Data":"3e95011ef0d51779fa2a4e3870a7f0f320400a2956c1d8b3094be660f55aa184"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.943249 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-g4684" event={"ID":"2eb4ea57-e198-45b1-8f63-d95d4a223362","Type":"ContainerStarted","Data":"b5b073a910133df25ce03ea00b0191c611129e11299dd17d5eb29ac7d1f4569a"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.945167 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j" event={"ID":"925f71fa-9882-47d0-9708-0c34ebb51df8","Type":"ContainerStarted","Data":"6f77176b7879880e09a63b89a71cd8cc753e8822e58ec8224703dc51c4231abe"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.946740 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qxmc5" event={"ID":"37486438-b8c6-4b20-911f-f6e66393f414","Type":"ContainerStarted","Data":"68d8636cfd5948507120a9326dca013c891442a885150126e0be9286682a96e9"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.948580 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cpw5s" event={"ID":"d253a0a7-4cc1-472f-b741-76da282b7672","Type":"ContainerStarted","Data":"dcf7614d6cb82fd048b89c782bc5ab43fc45d42c8fbcfdcbb33c3da4b9103cd5"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.950923 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vtdlj" 
event={"ID":"6689754f-f13b-48f7-89a6-353ef119cd75","Type":"ContainerStarted","Data":"5e19f10d7b6c8ed8c527d509df7aca183c8e9b30a611247cb5831388fbcfa2f2"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.953245 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mdxjh" event={"ID":"200bbd12-8133-46c5-a69f-ed2dd0b5e191","Type":"ContainerStarted","Data":"443f733848ad026da2b592debbb02d81fd2552f1e97c699927bf12d868baf63f"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.956689 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4wt5l" event={"ID":"496d955a-3328-4f38-86db-e5c382672b27","Type":"ContainerStarted","Data":"6e4364bc9f53613b98e24d5ae460bdd8b8d614fd3c9106844abe8cb10ebfd5ea"} Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.959091 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-g65qn" Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.959143 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-hgwwp" Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.959469 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.960480 4631 patch_prober.go:28] interesting pod/downloads-7954f5f757-g65qn container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.960578 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-g65qn" podUID="832113b6-ef23-4863-8080-cea0494584e7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.976230 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:45 crc kubenswrapper[4631]: E1204 17:30:45.976820 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:46.476798926 +0000 UTC m=+176.509040924 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.982537 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" Dec 04 17:30:45 crc kubenswrapper[4631]: I1204 17:30:45.985070 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-hgwwp" Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.012563 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-h4gb7" podStartSLOduration=149.012535827 podStartE2EDuration="2m29.012535827s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:46.008714577 +0000 UTC m=+176.040956585" watchObservedRunningTime="2025-12-04 17:30:46.012535827 +0000 UTC m=+176.044777825" Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.081051 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:46 crc kubenswrapper[4631]: E1204 17:30:46.088241 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:46.58820743 +0000 UTC m=+176.620449428 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.136979 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-hgwwp" podStartSLOduration=150.136955986 podStartE2EDuration="2m30.136955986s" podCreationTimestamp="2025-12-04 17:28:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:46.075652188 +0000 UTC m=+176.107894186" watchObservedRunningTime="2025-12-04 17:30:46.136955986 +0000 UTC m=+176.169197984" Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.195418 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:46 crc kubenswrapper[4631]: E1204 17:30:46.195868 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:46.695843824 +0000 UTC m=+176.728085822 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.236071 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-vgm6t" podStartSLOduration=149.236041224 podStartE2EDuration="2m29.236041224s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:46.2338391 +0000 UTC m=+176.266081118" watchObservedRunningTime="2025-12-04 17:30:46.236041224 +0000 UTC m=+176.268283222" Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.236299 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-glqcf" podStartSLOduration=149.236293991 podStartE2EDuration="2m29.236293991s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:46.143439494 +0000 UTC m=+176.175681482" watchObservedRunningTime="2025-12-04 17:30:46.236293991 +0000 UTC m=+176.268535989" Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.297470 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:46 crc kubenswrapper[4631]: E1204 17:30:46.297960 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:46.7979439 +0000 UTC m=+176.830185898 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.366393 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-br8kn" podStartSLOduration=149.366335593 podStartE2EDuration="2m29.366335593s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:46.301488932 +0000 UTC m=+176.333730930" watchObservedRunningTime="2025-12-04 17:30:46.366335593 +0000 UTC m=+176.398577601" Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.401419 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:46 crc kubenswrapper[4631]: E1204 17:30:46.402153 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:46.902122565 +0000 UTC m=+176.934364563 (durationBeforeRetry 500ms). 
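[editor's note] The "Observed pod startup duration" entries are internally consistent: with the image-pull timestamps zero-valued ("0001-01-01 00:00:00"), podStartSLOduration equals watchObservedRunningTime minus podCreationTimestamp. A quick check of the kube-controller-manager-operator numbers above (149.366335593s), using only the timestamps printed in the log:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Go's default time.String() layout, which these log fields use.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

	// Timestamps copied from the kube-controller-manager-operator entry above.
	created, _ := time.Parse(layout, "2025-12-04 17:28:17 +0000 UTC")
	watched, _ := time.Parse(layout, "2025-12-04 17:30:46.366335593 +0000 UTC")

	// With no image-pull interval to subtract, the SLO duration is the plain
	// end-to-end duration: prints 2m29.366335593s, i.e. 149.366335593s as logged.
	fmt.Println(watched.Sub(created))
}
```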
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.499457 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-rsqp8" podStartSLOduration=149.499436223 podStartE2EDuration="2m29.499436223s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:46.419922949 +0000 UTC m=+176.452164947" watchObservedRunningTime="2025-12-04 17:30:46.499436223 +0000 UTC m=+176.531678221" Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.503580 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:46 crc kubenswrapper[4631]: E1204 17:30:46.504139 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:47.004125678 +0000 UTC m=+177.036367676 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.604334 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:46 crc kubenswrapper[4631]: E1204 17:30:46.604729 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:47.10471026 +0000 UTC m=+177.136952258 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.605835 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-g65qn" podStartSLOduration=150.605799001 podStartE2EDuration="2m30.605799001s" podCreationTimestamp="2025-12-04 17:28:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:46.524473935 +0000 UTC m=+176.556715943" watchObservedRunningTime="2025-12-04 17:30:46.605799001 +0000 UTC m=+176.638041019" Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.606987 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-kl45g" podStartSLOduration=150.606978685 podStartE2EDuration="2m30.606978685s" podCreationTimestamp="2025-12-04 17:28:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:46.604348629 +0000 UTC m=+176.636590627" watchObservedRunningTime="2025-12-04 17:30:46.606978685 +0000 UTC m=+176.639220703" Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.668049 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-glqcf" Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.688835 4631 patch_prober.go:28] interesting pod/router-default-5444994796-glqcf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 17:30:46 crc kubenswrapper[4631]: [-]has-synced failed: reason withheld Dec 04 17:30:46 crc kubenswrapper[4631]: [+]process-running ok Dec 04 17:30:46 crc kubenswrapper[4631]: healthz check failed Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.688948 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glqcf" podUID="db00623b-8c6a-45d1-ab8b-a7e4f81f64eb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.697515 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" podStartSLOduration=149.697487516 podStartE2EDuration="2m29.697487516s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:46.689199857 +0000 UTC m=+176.721441855" watchObservedRunningTime="2025-12-04 17:30:46.697487516 +0000 UTC m=+176.729729524" Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.706209 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:46 crc kubenswrapper[4631]: E1204 17:30:46.706973 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:47.20695733 +0000 UTC m=+177.239199328 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.810156 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:46 crc kubenswrapper[4631]: E1204 17:30:46.810534 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:47.310478616 +0000 UTC m=+177.342720624 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.811094 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:46 crc kubenswrapper[4631]: E1204 17:30:46.811660 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:47.31164788 +0000 UTC m=+177.343890058 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.912963 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:46 crc kubenswrapper[4631]: E1204 17:30:46.913429 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:47.413409776 +0000 UTC m=+177.445651774 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.977051 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-22m6s" event={"ID":"ff4cb9cc-d164-4771-8e5f-41acc28c25e6","Type":"ContainerStarted","Data":"774bed9f838da28004d305531adb455260a75cdb18cce5c63192143fab4e6f25"} Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.979585 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-8kcrj" event={"ID":"86903bd1-674d-4fa2-b9d1-dbc8f347b72b","Type":"ContainerStarted","Data":"6902ab3002c7f03f1c757f4e8a100029bca451192257d4ed935c91a9547bbd62"} Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.981149 4631 generic.go:334] "Generic (PLEG): container finished" podID="925f71fa-9882-47d0-9708-0c34ebb51df8" containerID="6f77176b7879880e09a63b89a71cd8cc753e8822e58ec8224703dc51c4231abe" exitCode=0 Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.981735 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j" event={"ID":"925f71fa-9882-47d0-9708-0c34ebb51df8","Type":"ContainerDied","Data":"6f77176b7879880e09a63b89a71cd8cc753e8822e58ec8224703dc51c4231abe"} Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.982721 4631 patch_prober.go:28] interesting pod/downloads-7954f5f757-g65qn container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Dec 04 17:30:46 crc kubenswrapper[4631]: I1204 17:30:46.982796 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-g65qn" podUID="832113b6-ef23-4863-8080-cea0494584e7" containerName="download-server" probeResult="failure" output="Get 
\"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.018007 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:47 crc kubenswrapper[4631]: E1204 17:30:47.018432 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:47.518418725 +0000 UTC m=+177.550660723 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.020551 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-22m6s" podStartSLOduration=150.020530296 podStartE2EDuration="2m30.020530296s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:47.018838897 +0000 UTC m=+177.051080915" watchObservedRunningTime="2025-12-04 17:30:47.020530296 +0000 UTC m=+177.052772294" Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.078985 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n" podStartSLOduration=151.078966352 podStartE2EDuration="2m31.078966352s" podCreationTimestamp="2025-12-04 17:28:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:47.078467308 +0000 UTC m=+177.110709306" watchObservedRunningTime="2025-12-04 17:30:47.078966352 +0000 UTC m=+177.111208350" Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.127387 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:47 crc kubenswrapper[4631]: E1204 17:30:47.131752 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:47.631715974 +0000 UTC m=+177.663958132 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.209940 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2" podStartSLOduration=150.20991861 podStartE2EDuration="2m30.20991861s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:47.206210803 +0000 UTC m=+177.238452801" watchObservedRunningTime="2025-12-04 17:30:47.20991861 +0000 UTC m=+177.242160608" Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.231800 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:47 crc kubenswrapper[4631]: E1204 17:30:47.250796 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:47.732397238 +0000 UTC m=+177.764639236 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.252053 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cpw5s" podStartSLOduration=150.252037785 podStartE2EDuration="2m30.252037785s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:47.251711835 +0000 UTC m=+177.283953843" watchObservedRunningTime="2025-12-04 17:30:47.252037785 +0000 UTC m=+177.284279783" Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.332751 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:47 crc kubenswrapper[4631]: E1204 17:30:47.334417 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:47.834393501 +0000 UTC m=+177.866635499 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.338114 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-8kcrj" podStartSLOduration=151.338100618 podStartE2EDuration="2m31.338100618s" podCreationTimestamp="2025-12-04 17:28:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:47.330835088 +0000 UTC m=+177.363077106" watchObservedRunningTime="2025-12-04 17:30:47.338100618 +0000 UTC m=+177.370342626" Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.436461 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:47 crc kubenswrapper[4631]: E1204 17:30:47.437034 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:47.937009191 +0000 UTC m=+177.969251189 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.517784 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-pqqkz" podStartSLOduration=150.517759981 podStartE2EDuration="2m30.517759981s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:47.493638765 +0000 UTC m=+177.525880763" watchObservedRunningTime="2025-12-04 17:30:47.517759981 +0000 UTC m=+177.550001979" Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.518257 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w2p6c" podStartSLOduration=150.518250315 podStartE2EDuration="2m30.518250315s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:47.41859018 +0000 UTC m=+177.450832188" watchObservedRunningTime="2025-12-04 17:30:47.518250315 +0000 UTC m=+177.550492313" Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.547899 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:47 crc kubenswrapper[4631]: E1204 17:30:47.548296 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:48.048280701 +0000 UTC m=+178.080522699 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.607350 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-gswpr" podStartSLOduration=150.607320865 podStartE2EDuration="2m30.607320865s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:47.607015236 +0000 UTC m=+177.639257244" watchObservedRunningTime="2025-12-04 17:30:47.607320865 +0000 UTC m=+177.639562863" Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.646325 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-z2968" podStartSLOduration=150.646302009 podStartE2EDuration="2m30.646302009s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:47.645551998 +0000 UTC m=+177.677793996" watchObservedRunningTime="2025-12-04 17:30:47.646302009 +0000 UTC m=+177.678544007" Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.650630 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:47 crc kubenswrapper[4631]: E1204 17:30:47.651089 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:48.151070257 +0000 UTC m=+178.183312245 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.675253 4631 patch_prober.go:28] interesting pod/router-default-5444994796-glqcf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 17:30:47 crc kubenswrapper[4631]: [-]has-synced failed: reason withheld Dec 04 17:30:47 crc kubenswrapper[4631]: [+]process-running ok Dec 04 17:30:47 crc kubenswrapper[4631]: healthz check failed Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.675308 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glqcf" podUID="db00623b-8c6a-45d1-ab8b-a7e4f81f64eb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.679612 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-96c5q" podStartSLOduration=150.67959577 podStartE2EDuration="2m30.67959577s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:47.677960003 +0000 UTC m=+177.710202001" watchObservedRunningTime="2025-12-04 17:30:47.67959577 +0000 UTC m=+177.711837768" Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.752337 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:47 crc kubenswrapper[4631]: E1204 17:30:47.752739 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:48.252725459 +0000 UTC m=+178.284967457 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.756835 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs" podStartSLOduration=47.756817108 podStartE2EDuration="47.756817108s" podCreationTimestamp="2025-12-04 17:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:47.736724988 +0000 UTC m=+177.768966986" watchObservedRunningTime="2025-12-04 17:30:47.756817108 +0000 UTC m=+177.789059106" Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.789378 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-d97wj" podStartSLOduration=150.789348156 podStartE2EDuration="2m30.789348156s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:47.788635656 +0000 UTC m=+177.820877654" watchObservedRunningTime="2025-12-04 17:30:47.789348156 +0000 UTC m=+177.821590154" Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.790293 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-pdgsq" podStartSLOduration=150.790287873 podStartE2EDuration="2m30.790287873s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:47.759409752 +0000 UTC m=+177.791651740" watchObservedRunningTime="2025-12-04 17:30:47.790287873 +0000 UTC m=+177.822529871" Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.833381 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j54mq" podStartSLOduration=150.833342655 podStartE2EDuration="2m30.833342655s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:47.812049251 +0000 UTC m=+177.844291249" watchObservedRunningTime="2025-12-04 17:30:47.833342655 +0000 UTC m=+177.865584653" Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.835622 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-g4684" podStartSLOduration=150.835613131 podStartE2EDuration="2m30.835613131s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:47.830489043 +0000 UTC m=+177.862731041" watchObservedRunningTime="2025-12-04 17:30:47.835613131 +0000 UTC m=+177.867855129" Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.854176 4631 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:47 crc kubenswrapper[4631]: E1204 17:30:47.854521 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:48.354510186 +0000 UTC m=+178.386752184 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.885336 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhq8j" podStartSLOduration=150.885291754 podStartE2EDuration="2m30.885291754s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:47.885229372 +0000 UTC m=+177.917471370" watchObservedRunningTime="2025-12-04 17:30:47.885291754 +0000 UTC m=+177.917533762" Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.905216 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fgbnd" podStartSLOduration=150.905201138 podStartE2EDuration="2m30.905201138s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:47.903625463 +0000 UTC m=+177.935867461" watchObservedRunningTime="2025-12-04 17:30:47.905201138 +0000 UTC m=+177.937443136" Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.955936 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:47 crc kubenswrapper[4631]: E1204 17:30:47.956696 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:48.456677663 +0000 UTC m=+178.488919661 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.977050 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-vndv7" podStartSLOduration=11.977029111 podStartE2EDuration="11.977029111s" podCreationTimestamp="2025-12-04 17:30:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:47.919524722 +0000 UTC m=+177.951766720" watchObservedRunningTime="2025-12-04 17:30:47.977029111 +0000 UTC m=+178.009271109" Dec 04 17:30:47 crc kubenswrapper[4631]: I1204 17:30:47.979655 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr" podStartSLOduration=150.979639086 podStartE2EDuration="2m30.979639086s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:47.971421649 +0000 UTC m=+178.003663647" watchObservedRunningTime="2025-12-04 17:30:47.979639086 +0000 UTC m=+178.011881084" Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.046654 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vtdlj" podStartSLOduration=151.046626798 podStartE2EDuration="2m31.046626798s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:48.044571659 +0000 UTC m=+178.076813657" watchObservedRunningTime="2025-12-04 17:30:48.046626798 +0000 UTC m=+178.078868796" Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.058594 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:48 crc kubenswrapper[4631]: E1204 17:30:48.062401 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:48.562387043 +0000 UTC m=+178.594629041 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.098195 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-j7kc7" podStartSLOduration=12.098164345 podStartE2EDuration="12.098164345s" podCreationTimestamp="2025-12-04 17:30:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:48.096332932 +0000 UTC m=+178.128574940" watchObservedRunningTime="2025-12-04 17:30:48.098164345 +0000 UTC m=+178.130406343" Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.160195 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:48 crc kubenswrapper[4631]: E1204 17:30:48.160336 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:48.660305328 +0000 UTC m=+178.692547326 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.160458 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:48 crc kubenswrapper[4631]: E1204 17:30:48.160794 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:48.660785932 +0000 UTC m=+178.693027930 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.261418 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:48 crc kubenswrapper[4631]: E1204 17:30:48.261831 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:48.761815946 +0000 UTC m=+178.794057934 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.363462 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:48 crc kubenswrapper[4631]: E1204 17:30:48.363768 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:48.863754777 +0000 UTC m=+178.895996765 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.464926 4631 patch_prober.go:28] interesting pod/downloads-7954f5f757-g65qn container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.464976 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-g65qn" podUID="832113b6-ef23-4863-8080-cea0494584e7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.465705 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:48 crc kubenswrapper[4631]: E1204 17:30:48.466127 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:48.96608651 +0000 UTC m=+178.998328508 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.466482 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:48 crc kubenswrapper[4631]: E1204 17:30:48.466933 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:48.966926134 +0000 UTC m=+178.999168132 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.467265 4631 patch_prober.go:28] interesting pod/downloads-7954f5f757-g65qn container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.467299 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-g65qn" podUID="832113b6-ef23-4863-8080-cea0494584e7" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.479158 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-kl45g" Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.480212 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-kl45g" Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.480324 4631 patch_prober.go:28] interesting pod/console-f9d7485db-kl45g container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.40:8443/health\": dial tcp 10.217.0.40:8443: connect: connection refused" start-of-body= Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.480364 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-kl45g" podUID="81559ff3-95e9-455f-9d90-46c5f1a981ce" containerName="console" probeResult="failure" output="Get \"https://10.217.0.40:8443/health\": dial tcp 10.217.0.40:8443: connect: connection refused" Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.567998 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:48 crc kubenswrapper[4631]: E1204 17:30:48.569511 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:49.069494543 +0000 UTC m=+179.101736541 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.663567 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-glqcf" Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.669728 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:48 crc kubenswrapper[4631]: E1204 17:30:48.670098 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:49.170086445 +0000 UTC m=+179.202328443 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.671078 4631 patch_prober.go:28] interesting pod/router-default-5444994796-glqcf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 17:30:48 crc kubenswrapper[4631]: [-]has-synced failed: reason withheld Dec 04 17:30:48 crc kubenswrapper[4631]: [+]process-running ok Dec 04 17:30:48 crc kubenswrapper[4631]: healthz check failed Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.671107 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glqcf" podUID="db00623b-8c6a-45d1-ab8b-a7e4f81f64eb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.679201 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr" Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.683184 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr" Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.698245 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2" Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.713674 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2" Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.771013 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:48 crc kubenswrapper[4631]: E1204 17:30:48.771272 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:49.271228583 +0000 UTC m=+179.303470581 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.771467 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:48 crc kubenswrapper[4631]: E1204 17:30:48.772171 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:49.272145219 +0000 UTC m=+179.304387217 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.872770 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:48 crc kubenswrapper[4631]: E1204 17:30:48.873151 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:49.373136993 +0000 UTC m=+179.405378991 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.974657 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:48 crc kubenswrapper[4631]: E1204 17:30:48.975002 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:49.474990481 +0000 UTC m=+179.507232479 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.996723 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kkczr" event={"ID":"f495761a-e318-4f2a-a41a-7baa61110fe7","Type":"ContainerStarted","Data":"f7add08c10e19c8caf225554152259931eb1961ca07ca1972feeb75852947f47"} Dec 04 17:30:48 crc kubenswrapper[4631]: I1204 17:30:48.998334 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qxmc5" event={"ID":"37486438-b8c6-4b20-911f-f6e66393f414","Type":"ContainerStarted","Data":"b4d7ae9f6033fa210738a097cd6cc8c5b9cbfe8c525ebd487c91b2706f807f2b"} Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.076112 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:49 crc kubenswrapper[4631]: E1204 17:30:49.076640 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:49.576607133 +0000 UTC m=+179.608849161 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.167185 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.168483 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.178834 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:49 crc kubenswrapper[4631]: E1204 17:30:49.179303 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:49.679291005 +0000 UTC m=+179.711533003 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.186149 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.186230 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.200666 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.279580 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.279866 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a9332857-9c9d-41da-a671-c246f0b14252-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a9332857-9c9d-41da-a671-c246f0b14252\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.279943 4631 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a9332857-9c9d-41da-a671-c246f0b14252-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a9332857-9c9d-41da-a671-c246f0b14252\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 17:30:49 crc kubenswrapper[4631]: E1204 17:30:49.280060 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:49.780046262 +0000 UTC m=+179.812288260 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.381427 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a9332857-9c9d-41da-a671-c246f0b14252-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a9332857-9c9d-41da-a671-c246f0b14252\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.381521 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a9332857-9c9d-41da-a671-c246f0b14252-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a9332857-9c9d-41da-a671-c246f0b14252\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.381569 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:49 crc kubenswrapper[4631]: E1204 17:30:49.381893 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:49.88188237 +0000 UTC m=+179.914124368 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.382059 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a9332857-9c9d-41da-a671-c246f0b14252-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a9332857-9c9d-41da-a671-c246f0b14252\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.399435 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n" Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.426903 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a9332857-9c9d-41da-a671-c246f0b14252-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a9332857-9c9d-41da-a671-c246f0b14252\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.454286 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bmh96"] Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.455257 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bmh96" Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.458512 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.481026 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhq8j" Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.482328 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:49 crc kubenswrapper[4631]: E1204 17:30:49.482798 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:49.982778391 +0000 UTC m=+180.015020389 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.483404    4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bmh96"]
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.486540    4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.524696    4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j54mq"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.551268    4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j54mq"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.597303    4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snnrg\" (UniqueName: \"kubernetes.io/projected/e633bf80-04ad-4770-abc5-5d453077543c-kube-api-access-snnrg\") pod \"certified-operators-bmh96\" (UID: \"e633bf80-04ad-4770-abc5-5d453077543c\") " pod="openshift-marketplace/certified-operators-bmh96"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.597784    4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e633bf80-04ad-4770-abc5-5d453077543c-utilities\") pod \"certified-operators-bmh96\" (UID: \"e633bf80-04ad-4770-abc5-5d453077543c\") " pod="openshift-marketplace/certified-operators-bmh96"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.597856    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.597953    4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e633bf80-04ad-4770-abc5-5d453077543c-catalog-content\") pod \"certified-operators-bmh96\" (UID: \"e633bf80-04ad-4770-abc5-5d453077543c\") " pod="openshift-marketplace/certified-operators-bmh96"
Dec 04 17:30:49 crc kubenswrapper[4631]: E1204 17:30:49.599290    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:50.099279341 +0000 UTC m=+180.131521339 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.628705    4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-n9bhz"]
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.637019    4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n9bhz"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.655449    4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.669934    4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n9bhz"]
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.680755    4631 patch_prober.go:28] interesting pod/router-default-5444994796-glqcf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 04 17:30:49 crc kubenswrapper[4631]: [-]has-synced failed: reason withheld
Dec 04 17:30:49 crc kubenswrapper[4631]: [+]process-running ok
Dec 04 17:30:49 crc kubenswrapper[4631]: healthz check failed
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.680815    4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glqcf" podUID="db00623b-8c6a-45d1-ab8b-a7e4f81f64eb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.699047    4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.699203    4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/885a0356-3029-482f-b3b5-3caa01e19c62-catalog-content\") pod \"community-operators-n9bhz\" (UID: \"885a0356-3029-482f-b3b5-3caa01e19c62\") " pod="openshift-marketplace/community-operators-n9bhz"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.699289    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e633bf80-04ad-4770-abc5-5d453077543c-catalog-content\") pod \"certified-operators-bmh96\" (UID: \"e633bf80-04ad-4770-abc5-5d453077543c\") " pod="openshift-marketplace/certified-operators-bmh96"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.699306    4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/885a0356-3029-482f-b3b5-3caa01e19c62-utilities\") pod \"community-operators-n9bhz\" (UID: \"885a0356-3029-482f-b3b5-3caa01e19c62\") " pod="openshift-marketplace/community-operators-n9bhz"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.699329    4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbc2n\" (UniqueName: \"kubernetes.io/projected/885a0356-3029-482f-b3b5-3caa01e19c62-kube-api-access-pbc2n\") pod \"community-operators-n9bhz\" (UID: \"885a0356-3029-482f-b3b5-3caa01e19c62\") " pod="openshift-marketplace/community-operators-n9bhz"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.699352    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snnrg\" (UniqueName: \"kubernetes.io/projected/e633bf80-04ad-4770-abc5-5d453077543c-kube-api-access-snnrg\") pod \"certified-operators-bmh96\" (UID: \"e633bf80-04ad-4770-abc5-5d453077543c\") " pod="openshift-marketplace/certified-operators-bmh96"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.699449    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e633bf80-04ad-4770-abc5-5d453077543c-utilities\") pod \"certified-operators-bmh96\" (UID: \"e633bf80-04ad-4770-abc5-5d453077543c\") " pod="openshift-marketplace/certified-operators-bmh96"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.699873    4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e633bf80-04ad-4770-abc5-5d453077543c-utilities\") pod \"certified-operators-bmh96\" (UID: \"e633bf80-04ad-4770-abc5-5d453077543c\") " pod="openshift-marketplace/certified-operators-bmh96"
Dec 04 17:30:49 crc kubenswrapper[4631]: E1204 17:30:49.699968    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:50.199951996 +0000 UTC m=+180.232193994 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.771843    4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vtdlj"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.785386    4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vtdlj"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.800149    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.800223    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/885a0356-3029-482f-b3b5-3caa01e19c62-utilities\") pod \"community-operators-n9bhz\" (UID: \"885a0356-3029-482f-b3b5-3caa01e19c62\") " pod="openshift-marketplace/community-operators-n9bhz"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.800256    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbc2n\" (UniqueName: \"kubernetes.io/projected/885a0356-3029-482f-b3b5-3caa01e19c62-kube-api-access-pbc2n\") pod \"community-operators-n9bhz\" (UID: \"885a0356-3029-482f-b3b5-3caa01e19c62\") " pod="openshift-marketplace/community-operators-n9bhz"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.800299    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/885a0356-3029-482f-b3b5-3caa01e19c62-catalog-content\") pod \"community-operators-n9bhz\" (UID: \"885a0356-3029-482f-b3b5-3caa01e19c62\") " pod="openshift-marketplace/community-operators-n9bhz"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.800877    4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/885a0356-3029-482f-b3b5-3caa01e19c62-catalog-content\") pod \"community-operators-n9bhz\" (UID: \"885a0356-3029-482f-b3b5-3caa01e19c62\") " pod="openshift-marketplace/community-operators-n9bhz"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.800939    4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/885a0356-3029-482f-b3b5-3caa01e19c62-utilities\") pod \"community-operators-n9bhz\" (UID: \"885a0356-3029-482f-b3b5-3caa01e19c62\") " pod="openshift-marketplace/community-operators-n9bhz"
Dec 04 17:30:49 crc kubenswrapper[4631]: E1204 17:30:49.801076    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:50.301062622 +0000 UTC m=+180.333304630 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.817858    4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-d97wj"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.818587    4631 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-d97wj container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" start-of-body=
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.818629    4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-d97wj" podUID="dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.818939    4631 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-d97wj container/marketplace-operator namespace/openshift-marketplace: Liveness probe status=failure output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" start-of-body=
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.818963    4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-marketplace/marketplace-operator-79b997595-d97wj" podUID="dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.819001    4631 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-d97wj container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" start-of-body=
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.819011    4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-d97wj" podUID="dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.837382    4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbc2n\" (UniqueName: \"kubernetes.io/projected/885a0356-3029-482f-b3b5-3caa01e19c62-kube-api-access-pbc2n\") pod \"community-operators-n9bhz\" (UID: \"885a0356-3029-482f-b3b5-3caa01e19c62\") " pod="openshift-marketplace/community-operators-n9bhz"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.847459    4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-l85pg"]
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.848459    4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l85pg"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.875198    4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l85pg"]
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.901580    4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.902506    4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cb73c44-e995-4e73-9bd3-422c00633ddf-utilities\") pod \"certified-operators-l85pg\" (UID: \"5cb73c44-e995-4e73-9bd3-422c00633ddf\") " pod="openshift-marketplace/certified-operators-l85pg"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.902777    4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cb73c44-e995-4e73-9bd3-422c00633ddf-catalog-content\") pod \"certified-operators-l85pg\" (UID: \"5cb73c44-e995-4e73-9bd3-422c00633ddf\") " pod="openshift-marketplace/certified-operators-l85pg"
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.902905    4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74nx8\" (UniqueName: \"kubernetes.io/projected/5cb73c44-e995-4e73-9bd3-422c00633ddf-kube-api-access-74nx8\") pod \"certified-operators-l85pg\" (UID: \"5cb73c44-e995-4e73-9bd3-422c00633ddf\") " pod="openshift-marketplace/certified-operators-l85pg"
Dec 04 17:30:49 crc kubenswrapper[4631]: E1204 17:30:49.903646    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:50.403630021 +0000 UTC m=+180.435872019 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:49 crc kubenswrapper[4631]: I1204 17:30:49.972870    4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n9bhz"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.005136    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cb73c44-e995-4e73-9bd3-422c00633ddf-catalog-content\") pod \"certified-operators-l85pg\" (UID: \"5cb73c44-e995-4e73-9bd3-422c00633ddf\") " pod="openshift-marketplace/certified-operators-l85pg"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.005214    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74nx8\" (UniqueName: \"kubernetes.io/projected/5cb73c44-e995-4e73-9bd3-422c00633ddf-kube-api-access-74nx8\") pod \"certified-operators-l85pg\" (UID: \"5cb73c44-e995-4e73-9bd3-422c00633ddf\") " pod="openshift-marketplace/certified-operators-l85pg"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.005277    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cb73c44-e995-4e73-9bd3-422c00633ddf-utilities\") pod \"certified-operators-l85pg\" (UID: \"5cb73c44-e995-4e73-9bd3-422c00633ddf\") " pod="openshift-marketplace/certified-operators-l85pg"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.005311    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:50 crc kubenswrapper[4631]: E1204 17:30:50.005599    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:50.505588203 +0000 UTC m=+180.537830201 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.006246    4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cb73c44-e995-4e73-9bd3-422c00633ddf-catalog-content\") pod \"certified-operators-l85pg\" (UID: \"5cb73c44-e995-4e73-9bd3-422c00633ddf\") " pod="openshift-marketplace/certified-operators-l85pg"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.008172    4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e633bf80-04ad-4770-abc5-5d453077543c-catalog-content\") pod \"certified-operators-bmh96\" (UID: \"e633bf80-04ad-4770-abc5-5d453077543c\") " pod="openshift-marketplace/certified-operators-bmh96"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.011619    4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cb73c44-e995-4e73-9bd3-422c00633ddf-utilities\") pod \"certified-operators-l85pg\" (UID: \"5cb73c44-e995-4e73-9bd3-422c00633ddf\") " pod="openshift-marketplace/certified-operators-l85pg"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.037067    4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sgr7v" event={"ID":"c6867584-8090-43a9-bf55-f3fda830e5f0","Type":"ContainerStarted","Data":"8ee69d70826bd00bdb1d001f4bf0a533281213f8bbb10a1d0459e85f2cd2e4a0"}
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.046196    4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snnrg\" (UniqueName: \"kubernetes.io/projected/e633bf80-04ad-4770-abc5-5d453077543c-kube-api-access-snnrg\") pod \"certified-operators-bmh96\" (UID: \"e633bf80-04ad-4770-abc5-5d453077543c\") " pod="openshift-marketplace/certified-operators-bmh96"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.046762    4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bvzck"]
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.047779    4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bvzck"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.076778    4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bmh96"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.103530    4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74nx8\" (UniqueName: \"kubernetes.io/projected/5cb73c44-e995-4e73-9bd3-422c00633ddf-kube-api-access-74nx8\") pod \"certified-operators-l85pg\" (UID: \"5cb73c44-e995-4e73-9bd3-422c00633ddf\") " pod="openshift-marketplace/certified-operators-l85pg"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.106105    4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.106391    4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcr6b\" (UniqueName: \"kubernetes.io/projected/5622ec39-e11e-44c2-b059-47d6fc091328-kube-api-access-hcr6b\") pod \"community-operators-bvzck\" (UID: \"5622ec39-e11e-44c2-b059-47d6fc091328\") " pod="openshift-marketplace/community-operators-bvzck"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.106428    4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5622ec39-e11e-44c2-b059-47d6fc091328-catalog-content\") pod \"community-operators-bvzck\" (UID: \"5622ec39-e11e-44c2-b059-47d6fc091328\") " pod="openshift-marketplace/community-operators-bvzck"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.106593    4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5622ec39-e11e-44c2-b059-47d6fc091328-utilities\") pod \"community-operators-bvzck\" (UID: \"5622ec39-e11e-44c2-b059-47d6fc091328\") " pod="openshift-marketplace/community-operators-bvzck"
Dec 04 17:30:50 crc kubenswrapper[4631]: E1204 17:30:50.107638    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:50.607612266 +0000 UTC m=+180.639854264 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.128581    4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bvzck"]
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.208720    4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l85pg"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.209782    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5622ec39-e11e-44c2-b059-47d6fc091328-utilities\") pod \"community-operators-bvzck\" (UID: \"5622ec39-e11e-44c2-b059-47d6fc091328\") " pod="openshift-marketplace/community-operators-bvzck"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.209852    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcr6b\" (UniqueName: \"kubernetes.io/projected/5622ec39-e11e-44c2-b059-47d6fc091328-kube-api-access-hcr6b\") pod \"community-operators-bvzck\" (UID: \"5622ec39-e11e-44c2-b059-47d6fc091328\") " pod="openshift-marketplace/community-operators-bvzck"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.209886    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5622ec39-e11e-44c2-b059-47d6fc091328-catalog-content\") pod \"community-operators-bvzck\" (UID: \"5622ec39-e11e-44c2-b059-47d6fc091328\") " pod="openshift-marketplace/community-operators-bvzck"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.209926    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:50 crc kubenswrapper[4631]: E1204 17:30:50.210186    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:50.710172314 +0000 UTC m=+180.742414312 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.210896    4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5622ec39-e11e-44c2-b059-47d6fc091328-utilities\") pod \"community-operators-bvzck\" (UID: \"5622ec39-e11e-44c2-b059-47d6fc091328\") " pod="openshift-marketplace/community-operators-bvzck"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.213934    4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5622ec39-e11e-44c2-b059-47d6fc091328-catalog-content\") pod \"community-operators-bvzck\" (UID: \"5622ec39-e11e-44c2-b059-47d6fc091328\") " pod="openshift-marketplace/community-operators-bvzck"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.311276    4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:50 crc kubenswrapper[4631]: E1204 17:30:50.314607    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:50.814559486 +0000 UTC m=+180.846801494 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.316405    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:50 crc kubenswrapper[4631]: E1204 17:30:50.317010    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:50.816993906 +0000 UTC m=+180.849235904 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.317160    4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcr6b\" (UniqueName: \"kubernetes.io/projected/5622ec39-e11e-44c2-b059-47d6fc091328-kube-api-access-hcr6b\") pod \"community-operators-bvzck\" (UID: \"5622ec39-e11e-44c2-b059-47d6fc091328\") " pod="openshift-marketplace/community-operators-bvzck"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.353391    4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhq8j"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.399288    4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.408459    4631 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-bvw8n container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.408780    4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n" podUID="dbc61eb8-0a7c-4ea2-8f50-5a2522daa465" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.410436    4631 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-bvw8n container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.410493    4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n" podUID="dbc61eb8-0a7c-4ea2-8f50-5a2522daa465" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.427258    4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:50 crc kubenswrapper[4631]: E1204 17:30:50.427950    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:50.927935737 +0000 UTC m=+180.960177735 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.428441    4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bvzck"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.567347    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:50 crc kubenswrapper[4631]: E1204 17:30:50.567924    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:51.067899735 +0000 UTC m=+181.100141733 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.672048    4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:50 crc kubenswrapper[4631]: E1204 17:30:50.672441    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:51.17242724 +0000 UTC m=+181.204669238 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.687506    4631 patch_prober.go:28] interesting pod/router-default-5444994796-glqcf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 04 17:30:50 crc kubenswrapper[4631]: [-]has-synced failed: reason withheld
Dec 04 17:30:50 crc kubenswrapper[4631]: [+]process-running ok
Dec 04 17:30:50 crc kubenswrapper[4631]: healthz check failed
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.687573    4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glqcf" podUID="db00623b-8c6a-45d1-ab8b-a7e4f81f64eb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.775275    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:50 crc kubenswrapper[4631]: E1204 17:30:50.775997    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:51.275985898 +0000 UTC m=+181.308227896 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.879075    4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:50 crc kubenswrapper[4631]: E1204 17:30:50.879450    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:51.379435613 +0000 UTC m=+181.411677601 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:50 crc kubenswrapper[4631]: I1204 17:30:50.983309    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:50 crc kubenswrapper[4631]: E1204 17:30:50.983647    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:51.483635649 +0000 UTC m=+181.515877647 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.025309    4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n9bhz"]
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.085399    4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:51 crc kubenswrapper[4631]: E1204 17:30:51.086074    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:51.586059644 +0000 UTC m=+181.618301642 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.132640    4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6lshd" event={"ID":"db12e396-cc65-44e6-89f4-e4458958f443","Type":"ContainerStarted","Data":"99cce018a3794611e565d9757fe8d4e589cf5dd23f1de101830ebdd1d141dfd2"}
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.166682    4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6lshd" podStartSLOduration=154.166666179 podStartE2EDuration="2m34.166666179s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:51.165811014 +0000 UTC m=+181.198053022" watchObservedRunningTime="2025-12-04 17:30:51.166666179 +0000 UTC m=+181.198908177"
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.194948    4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-h5fb2" event={"ID":"6b035e50-4c55-4081-8ff2-3f720a8fa1b4","Type":"ContainerStarted","Data":"10af58ee53437ec7a79f12f98eb204c24d24eac2aba6040177108bc9763d660c"}
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.194996    4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-h5fb2"
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.195416    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:51 crc kubenswrapper[4631]: E1204 17:30:51.195734    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:51.695723067 +0000 UTC m=+181.727965065 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.197233    4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"a9332857-9c9d-41da-a671-c246f0b14252","Type":"ContainerStarted","Data":"27ad7a84df7e8f67fee38b969b746a853fd9eb10600780478911bbe3e1935352"}
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.247708    4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-h5fb2" podStartSLOduration=15.247687137 podStartE2EDuration="15.247687137s" podCreationTimestamp="2025-12-04 17:30:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:51.245989998 +0000 UTC m=+181.278232006" watchObservedRunningTime="2025-12-04 17:30:51.247687137 +0000 UTC m=+181.279929135"
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.262658    4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mdxjh" event={"ID":"200bbd12-8133-46c5-a69f-ed2dd0b5e191","Type":"ContainerStarted","Data":"f0ae2b4838944cae78b8f7121288d297bd49019084fb2585d5243ee38a1b9b2e"}
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.290798    4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9qbmf" event={"ID":"4ef93ccc-0327-4a63-8f44-4cb3f9e1ba83","Type":"ContainerStarted","Data":"5b21f2d0dab95e6579496b40caafe09e861c8a0afe5c733566f81838567f6ceb"}
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.296979    4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:51 crc kubenswrapper[4631]: E1204 17:30:51.298273    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:51.798257365 +0000 UTC m=+181.830499363 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.314406    4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bvzck"]
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.326010    4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9qbmf" podStartSLOduration=155.325996396 podStartE2EDuration="2m35.325996396s" podCreationTimestamp="2025-12-04 17:28:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:51.323818833 +0000 UTC m=+181.356060831" watchObservedRunningTime="2025-12-04 17:30:51.325996396 +0000 UTC m=+181.358238384"
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.357081    4631 generic.go:334] "Generic (PLEG): container finished" podID="4212359b-081e-4b11-8ca7-87cb9ff33a1c" containerID="72e4cb61ae27c39b1fe5d633cd183b6c90f27c95b6163743193fa3f2cf45de48" exitCode=0
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.357144    4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs" event={"ID":"4212359b-081e-4b11-8ca7-87cb9ff33a1c","Type":"ContainerDied","Data":"72e4cb61ae27c39b1fe5d633cd183b6c90f27c95b6163743193fa3f2cf45de48"}
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.398262    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:51 crc kubenswrapper[4631]: E1204 17:30:51.398553    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:51.898541859 +0000 UTC m=+181.930783857 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.412862    4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4wt5l" event={"ID":"496d955a-3328-4f38-86db-e5c382672b27","Type":"ContainerStarted","Data":"856eca2f34efd44a98559eff7df9cd9da7831ea5d7fe793b3098ae3db8a838c2"}
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.413044    4631 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-bvw8n container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.413081    4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n" podUID="dbc61eb8-0a7c-4ea2-8f50-5a2522daa465" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.465928    4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-68slc" event={"ID":"1892d78f-ccb4-491b-8622-84340df88183","Type":"ContainerStarted","Data":"7b3a0fab446cd152fbae9153446bfbe1d4a65602093117ed99cb48767a99fda4"}
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.501989    4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:51 crc kubenswrapper[4631]: E1204 17:30:51.503525    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:52.003506277 +0000 UTC m=+182.035748275 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.504509    4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" event={"ID":"30620276-02bb-4cea-a50e-36fc7d4689ae","Type":"ContainerStarted","Data":"8ecb293d3380b7cd4fe0030d8dcb74d46bf2449437ac042a9f8799ad70cdaa0a"}
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.526899    4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4wt5l" podStartSLOduration=155.526880671 podStartE2EDuration="2m35.526880671s" podCreationTimestamp="2025-12-04 17:28:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:51.474952723 +0000 UTC m=+181.507194721" watchObservedRunningTime="2025-12-04 17:30:51.526880671 +0000 UTC m=+181.559122669"
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.570061    4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-swgqn" event={"ID":"87263d28-d75f-4701-9ac1-576084547adf","Type":"ContainerStarted","Data":"e1f87c7c2d63eb3fcd99ca3ffd1cfe4d2450eb0530114c0ae05cedcde7097150"}
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.604517    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:51 crc kubenswrapper[4631]: E1204 17:30:51.604855    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:52.10483794 +0000 UTC m=+182.137079958 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.617817    4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j" event={"ID":"925f71fa-9882-47d0-9708-0c34ebb51df8","Type":"ContainerStarted","Data":"1ec63d33bf8921132222d9abf5bd8e035530e08819b08da884aca1a456937130"}
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.618416    4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kkczr"
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.652027    4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-swgqn" podStartSLOduration=154.652008071 podStartE2EDuration="2m34.652008071s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:51.65059687 +0000 UTC m=+181.682838878" watchObservedRunningTime="2025-12-04 17:30:51.652008071 +0000 UTC m=+181.684250089"
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.653138    4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-68slc" podStartSLOduration=154.653126183 podStartE2EDuration="2m34.653126183s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:51.527914331 +0000 UTC m=+181.560156359" watchObservedRunningTime="2025-12-04 17:30:51.653126183 +0000 UTC m=+181.685368191"
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.668244    4631 patch_prober.go:28] interesting pod/router-default-5444994796-glqcf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 04 17:30:51 crc kubenswrapper[4631]: [-]has-synced failed: reason withheld
Dec 04 17:30:51 crc kubenswrapper[4631]: [+]process-running ok
Dec 04 17:30:51 crc kubenswrapper[4631]: healthz check failed
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.681481    4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glqcf" podUID="db00623b-8c6a-45d1-ab8b-a7e4f81f64eb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.707308    4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:51 crc kubenswrapper[4631]: E1204 17:30:51.709251    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:52.209231412 +0000 UTC m=+182.241473410 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.723054    4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bmh96"]
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.738775    4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l85pg"]
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.753789    4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qxmc5" podStartSLOduration=154.753761256 podStartE2EDuration="2m34.753761256s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:51.735225802 +0000 UTC m=+181.767467800" watchObservedRunningTime="2025-12-04 17:30:51.753761256 +0000 UTC m=+181.786003254"
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.814204    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:51 crc kubenswrapper[4631]: E1204 17:30:51.814520    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:52.314507129 +0000 UTC m=+182.346749127 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.838084    4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j" podStartSLOduration=154.838046858 podStartE2EDuration="2m34.838046858s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:51.813450898 +0000 UTC m=+181.845692896" watchObservedRunningTime="2025-12-04 17:30:51.838046858 +0000 UTC m=+181.870288856"
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.873927    4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nx6gn"]
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.893789    4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-sgr7v" podStartSLOduration=154.893768476 podStartE2EDuration="2m34.893768476s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:51.871984277 +0000 UTC m=+181.904226275" watchObservedRunningTime="2025-12-04 17:30:51.893768476 +0000 UTC m=+181.926010484"
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.907634    4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nx6gn"
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.911844    4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.917132    4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nx6gn"]
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.918469    4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 04 17:30:51 crc kubenswrapper[4631]: E1204 17:30:51.922454    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:52.42235235 +0000 UTC m=+182.454594348 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.926473    4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kkczr" podStartSLOduration=154.926454328 podStartE2EDuration="2m34.926454328s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:51.926075418 +0000 UTC m=+181.958317416" watchObservedRunningTime="2025-12-04 17:30:51.926454328 +0000 UTC m=+181.958696326"
Dec 04 17:30:51 crc kubenswrapper[4631]: I1204 17:30:51.926524    4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:51 crc kubenswrapper[4631]: E1204 17:30:51.933738    4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:52.433699517 +0000 UTC m=+182.465941515 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.027657 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.028243 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7gkq\" (UniqueName: \"kubernetes.io/projected/29451954-9708-42c3-87aa-e28d06a5c640-kube-api-access-b7gkq\") pod \"redhat-marketplace-nx6gn\" (UID: \"29451954-9708-42c3-87aa-e28d06a5c640\") " pod="openshift-marketplace/redhat-marketplace-nx6gn" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.028282 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29451954-9708-42c3-87aa-e28d06a5c640-utilities\") pod \"redhat-marketplace-nx6gn\" (UID: \"29451954-9708-42c3-87aa-e28d06a5c640\") " pod="openshift-marketplace/redhat-marketplace-nx6gn" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.028310 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29451954-9708-42c3-87aa-e28d06a5c640-catalog-content\") pod \"redhat-marketplace-nx6gn\" (UID: \"29451954-9708-42c3-87aa-e28d06a5c640\") " pod="openshift-marketplace/redhat-marketplace-nx6gn" Dec 04 17:30:52 crc kubenswrapper[4631]: E1204 17:30:52.028484 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:52.528469452 +0000 UTC m=+182.560711450 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.130917 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.130956 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7gkq\" (UniqueName: \"kubernetes.io/projected/29451954-9708-42c3-87aa-e28d06a5c640-kube-api-access-b7gkq\") pod \"redhat-marketplace-nx6gn\" (UID: \"29451954-9708-42c3-87aa-e28d06a5c640\") " pod="openshift-marketplace/redhat-marketplace-nx6gn" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.130992 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29451954-9708-42c3-87aa-e28d06a5c640-utilities\") pod \"redhat-marketplace-nx6gn\" (UID: \"29451954-9708-42c3-87aa-e28d06a5c640\") " pod="openshift-marketplace/redhat-marketplace-nx6gn" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.131019 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29451954-9708-42c3-87aa-e28d06a5c640-catalog-content\") pod \"redhat-marketplace-nx6gn\" (UID: \"29451954-9708-42c3-87aa-e28d06a5c640\") " pod="openshift-marketplace/redhat-marketplace-nx6gn" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.131474 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29451954-9708-42c3-87aa-e28d06a5c640-catalog-content\") pod \"redhat-marketplace-nx6gn\" (UID: \"29451954-9708-42c3-87aa-e28d06a5c640\") " pod="openshift-marketplace/redhat-marketplace-nx6gn" Dec 04 17:30:52 crc kubenswrapper[4631]: E1204 17:30:52.131711 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:52.63170134 +0000 UTC m=+182.663943338 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.132052 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29451954-9708-42c3-87aa-e28d06a5c640-utilities\") pod \"redhat-marketplace-nx6gn\" (UID: \"29451954-9708-42c3-87aa-e28d06a5c640\") " pod="openshift-marketplace/redhat-marketplace-nx6gn" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.163546 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7gkq\" (UniqueName: \"kubernetes.io/projected/29451954-9708-42c3-87aa-e28d06a5c640-kube-api-access-b7gkq\") pod \"redhat-marketplace-nx6gn\" (UID: \"29451954-9708-42c3-87aa-e28d06a5c640\") " pod="openshift-marketplace/redhat-marketplace-nx6gn" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.210652 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dr5fh"] Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.211618 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dr5fh" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.232246 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dr5fh"] Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.233016 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:52 crc kubenswrapper[4631]: E1204 17:30:52.233407 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:52.733392523 +0000 UTC m=+182.765634521 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.303973 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nx6gn" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.334336 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.334470 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63aa12c5-8868-471f-929f-df3697cc07ae-utilities\") pod \"redhat-marketplace-dr5fh\" (UID: \"63aa12c5-8868-471f-929f-df3697cc07ae\") " pod="openshift-marketplace/redhat-marketplace-dr5fh" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.334505 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63aa12c5-8868-471f-929f-df3697cc07ae-catalog-content\") pod \"redhat-marketplace-dr5fh\" (UID: \"63aa12c5-8868-471f-929f-df3697cc07ae\") " pod="openshift-marketplace/redhat-marketplace-dr5fh" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.334526 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjlwh\" (UniqueName: \"kubernetes.io/projected/63aa12c5-8868-471f-929f-df3697cc07ae-kube-api-access-fjlwh\") pod \"redhat-marketplace-dr5fh\" (UID: \"63aa12c5-8868-471f-929f-df3697cc07ae\") " pod="openshift-marketplace/redhat-marketplace-dr5fh" Dec 04 17:30:52 crc kubenswrapper[4631]: E1204 17:30:52.334832 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:52.83481591 +0000 UTC m=+182.867057918 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.435551 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.435792 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63aa12c5-8868-471f-929f-df3697cc07ae-utilities\") pod \"redhat-marketplace-dr5fh\" (UID: \"63aa12c5-8868-471f-929f-df3697cc07ae\") " pod="openshift-marketplace/redhat-marketplace-dr5fh" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.435834 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63aa12c5-8868-471f-929f-df3697cc07ae-catalog-content\") pod \"redhat-marketplace-dr5fh\" (UID: \"63aa12c5-8868-471f-929f-df3697cc07ae\") " pod="openshift-marketplace/redhat-marketplace-dr5fh" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.435859 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjlwh\" (UniqueName: \"kubernetes.io/projected/63aa12c5-8868-471f-929f-df3697cc07ae-kube-api-access-fjlwh\") pod \"redhat-marketplace-dr5fh\" (UID: \"63aa12c5-8868-471f-929f-df3697cc07ae\") " pod="openshift-marketplace/redhat-marketplace-dr5fh" Dec 04 17:30:52 crc kubenswrapper[4631]: E1204 17:30:52.436265 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:52.936249086 +0000 UTC m=+182.968491084 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.436743 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63aa12c5-8868-471f-929f-df3697cc07ae-utilities\") pod \"redhat-marketplace-dr5fh\" (UID: \"63aa12c5-8868-471f-929f-df3697cc07ae\") " pod="openshift-marketplace/redhat-marketplace-dr5fh" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.436919 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63aa12c5-8868-471f-929f-df3697cc07ae-catalog-content\") pod \"redhat-marketplace-dr5fh\" (UID: \"63aa12c5-8868-471f-929f-df3697cc07ae\") " pod="openshift-marketplace/redhat-marketplace-dr5fh" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.459432 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjlwh\" (UniqueName: \"kubernetes.io/projected/63aa12c5-8868-471f-929f-df3697cc07ae-kube-api-access-fjlwh\") pod \"redhat-marketplace-dr5fh\" (UID: \"63aa12c5-8868-471f-929f-df3697cc07ae\") " pod="openshift-marketplace/redhat-marketplace-dr5fh" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.531047 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dr5fh" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.537334 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:52 crc kubenswrapper[4631]: E1204 17:30:52.537894 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:53.037866307 +0000 UTC m=+183.070108485 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.613641 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-54924"] Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.614987 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-54924" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.622083 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.622699 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-54924"] Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.643252 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:52 crc kubenswrapper[4631]: E1204 17:30:52.643645 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:53.143619998 +0000 UTC m=+183.175861996 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.644198 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.644315 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bmh96" event={"ID":"e633bf80-04ad-4770-abc5-5d453077543c","Type":"ContainerStarted","Data":"fb1572ad85b74f5c351c1d8c1410f5380c11d1084cc3782588e45992dbee521b"} Dec 04 17:30:52 crc kubenswrapper[4631]: E1204 17:30:52.644669 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:53.144654828 +0000 UTC m=+183.176896826 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.646217 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9bhz" event={"ID":"885a0356-3029-482f-b3b5-3caa01e19c62","Type":"ContainerStarted","Data":"e89f4bb68089b566fb45b4a334d1a7d58645ab1bba453540506ed6f288befb36"} Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.651229 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bvzck" event={"ID":"5622ec39-e11e-44c2-b059-47d6fc091328","Type":"ContainerStarted","Data":"f50665149f641a9fb28136a2c1e3e33f9d645a6766c2b929e8bd1ff3c9ac49e7"} Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.655863 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l85pg" event={"ID":"5cb73c44-e995-4e73-9bd3-422c00633ddf","Type":"ContainerStarted","Data":"5ba12a072c105c566b67df94f434ddf3c098765b38c4cb7cf900a5b304f4e02e"} Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.694972 4631 patch_prober.go:28] interesting pod/router-default-5444994796-glqcf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 17:30:52 crc kubenswrapper[4631]: [-]has-synced failed: reason withheld Dec 04 17:30:52 crc kubenswrapper[4631]: [+]process-running ok Dec 04 17:30:52 crc kubenswrapper[4631]: healthz check failed Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.695064 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glqcf" podUID="db00623b-8c6a-45d1-ab8b-a7e4f81f64eb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.747095 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.747426 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ff644ad-bdd2-4306-918d-fe5a0bfbc964-utilities\") pod \"redhat-operators-54924\" (UID: \"2ff644ad-bdd2-4306-918d-fe5a0bfbc964\") " pod="openshift-marketplace/redhat-operators-54924" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.747481 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6jrn\" (UniqueName: \"kubernetes.io/projected/2ff644ad-bdd2-4306-918d-fe5a0bfbc964-kube-api-access-m6jrn\") pod \"redhat-operators-54924\" (UID: \"2ff644ad-bdd2-4306-918d-fe5a0bfbc964\") " pod="openshift-marketplace/redhat-operators-54924" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.747621 4631 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ff644ad-bdd2-4306-918d-fe5a0bfbc964-catalog-content\") pod \"redhat-operators-54924\" (UID: \"2ff644ad-bdd2-4306-918d-fe5a0bfbc964\") " pod="openshift-marketplace/redhat-operators-54924" Dec 04 17:30:52 crc kubenswrapper[4631]: E1204 17:30:52.747776 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:53.247756473 +0000 UTC m=+183.279998471 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.754175 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nx6gn"] Dec 04 17:30:52 crc kubenswrapper[4631]: W1204 17:30:52.788710 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod29451954_9708_42c3_87aa_e28d06a5c640.slice/crio-bebb6a658d63134aee4940514501c0dfebde56a72886f6168d3185325aa9356c WatchSource:0}: Error finding container bebb6a658d63134aee4940514501c0dfebde56a72886f6168d3185325aa9356c: Status 404 returned error can't find the container with id bebb6a658d63134aee4940514501c0dfebde56a72886f6168d3185325aa9356c Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.817989 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bxvz7"] Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.819113 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bxvz7" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.852647 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ff644ad-bdd2-4306-918d-fe5a0bfbc964-catalog-content\") pod \"redhat-operators-54924\" (UID: \"2ff644ad-bdd2-4306-918d-fe5a0bfbc964\") " pod="openshift-marketplace/redhat-operators-54924" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.852782 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ff644ad-bdd2-4306-918d-fe5a0bfbc964-utilities\") pod \"redhat-operators-54924\" (UID: \"2ff644ad-bdd2-4306-918d-fe5a0bfbc964\") " pod="openshift-marketplace/redhat-operators-54924" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.852899 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6jrn\" (UniqueName: \"kubernetes.io/projected/2ff644ad-bdd2-4306-918d-fe5a0bfbc964-kube-api-access-m6jrn\") pod \"redhat-operators-54924\" (UID: \"2ff644ad-bdd2-4306-918d-fe5a0bfbc964\") " pod="openshift-marketplace/redhat-operators-54924" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.853243 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.860379 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bxvz7"] Dec 04 17:30:52 crc kubenswrapper[4631]: E1204 17:30:52.860996 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:53.360970089 +0000 UTC m=+183.393212087 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.861989 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ff644ad-bdd2-4306-918d-fe5a0bfbc964-catalog-content\") pod \"redhat-operators-54924\" (UID: \"2ff644ad-bdd2-4306-918d-fe5a0bfbc964\") " pod="openshift-marketplace/redhat-operators-54924" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.862456 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ff644ad-bdd2-4306-918d-fe5a0bfbc964-utilities\") pod \"redhat-operators-54924\" (UID: \"2ff644ad-bdd2-4306-918d-fe5a0bfbc964\") " pod="openshift-marketplace/redhat-operators-54924" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.882115 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6jrn\" (UniqueName: \"kubernetes.io/projected/2ff644ad-bdd2-4306-918d-fe5a0bfbc964-kube-api-access-m6jrn\") pod \"redhat-operators-54924\" (UID: \"2ff644ad-bdd2-4306-918d-fe5a0bfbc964\") " pod="openshift-marketplace/redhat-operators-54924" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.901394 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dr5fh"] Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.922952 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvw8n" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.954568 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-54924" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.955183 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.955504 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c6c775a-ae4c-4682-97fd-5f9e4457f8fd-catalog-content\") pod \"redhat-operators-bxvz7\" (UID: \"7c6c775a-ae4c-4682-97fd-5f9e4457f8fd\") " pod="openshift-marketplace/redhat-operators-bxvz7" Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.955550 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kcm9\" (UniqueName: \"kubernetes.io/projected/7c6c775a-ae4c-4682-97fd-5f9e4457f8fd-kube-api-access-8kcm9\") pod \"redhat-operators-bxvz7\" (UID: \"7c6c775a-ae4c-4682-97fd-5f9e4457f8fd\") " pod="openshift-marketplace/redhat-operators-bxvz7" Dec 04 17:30:52 crc kubenswrapper[4631]: E1204 17:30:52.955582 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:53.455562368 +0000 UTC m=+183.487804366 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:52 crc kubenswrapper[4631]: I1204 17:30:52.955633 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c6c775a-ae4c-4682-97fd-5f9e4457f8fd-utilities\") pod \"redhat-operators-bxvz7\" (UID: \"7c6c775a-ae4c-4682-97fd-5f9e4457f8fd\") " pod="openshift-marketplace/redhat-operators-bxvz7" Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.028848 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs" Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.056594 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c6c775a-ae4c-4682-97fd-5f9e4457f8fd-catalog-content\") pod \"redhat-operators-bxvz7\" (UID: \"7c6c775a-ae4c-4682-97fd-5f9e4457f8fd\") " pod="openshift-marketplace/redhat-operators-bxvz7" Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.056648 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.056693 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kcm9\" (UniqueName: \"kubernetes.io/projected/7c6c775a-ae4c-4682-97fd-5f9e4457f8fd-kube-api-access-8kcm9\") pod \"redhat-operators-bxvz7\" (UID: \"7c6c775a-ae4c-4682-97fd-5f9e4457f8fd\") " pod="openshift-marketplace/redhat-operators-bxvz7" Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.056732 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c6c775a-ae4c-4682-97fd-5f9e4457f8fd-utilities\") pod \"redhat-operators-bxvz7\" (UID: \"7c6c775a-ae4c-4682-97fd-5f9e4457f8fd\") " pod="openshift-marketplace/redhat-operators-bxvz7" Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.058114 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c6c775a-ae4c-4682-97fd-5f9e4457f8fd-catalog-content\") pod \"redhat-operators-bxvz7\" (UID: \"7c6c775a-ae4c-4682-97fd-5f9e4457f8fd\") " pod="openshift-marketplace/redhat-operators-bxvz7" Dec 04 17:30:53 crc kubenswrapper[4631]: E1204 17:30:53.058340 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:53.558329322 +0000 UTC m=+183.590571320 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.058683 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c6c775a-ae4c-4682-97fd-5f9e4457f8fd-utilities\") pod \"redhat-operators-bxvz7\" (UID: \"7c6c775a-ae4c-4682-97fd-5f9e4457f8fd\") " pod="openshift-marketplace/redhat-operators-bxvz7" Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.088404 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kcm9\" (UniqueName: \"kubernetes.io/projected/7c6c775a-ae4c-4682-97fd-5f9e4457f8fd-kube-api-access-8kcm9\") pod \"redhat-operators-bxvz7\" (UID: \"7c6c775a-ae4c-4682-97fd-5f9e4457f8fd\") " pod="openshift-marketplace/redhat-operators-bxvz7" Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.158849 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:53 crc kubenswrapper[4631]: E1204 17:30:53.159027 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:53.659000237 +0000 UTC m=+183.691242235 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.159339 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mhlqn\" (UniqueName: \"kubernetes.io/projected/4212359b-081e-4b11-8ca7-87cb9ff33a1c-kube-api-access-mhlqn\") pod \"4212359b-081e-4b11-8ca7-87cb9ff33a1c\" (UID: \"4212359b-081e-4b11-8ca7-87cb9ff33a1c\") " Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.159393 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4212359b-081e-4b11-8ca7-87cb9ff33a1c-secret-volume\") pod \"4212359b-081e-4b11-8ca7-87cb9ff33a1c\" (UID: \"4212359b-081e-4b11-8ca7-87cb9ff33a1c\") " Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.159425 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4212359b-081e-4b11-8ca7-87cb9ff33a1c-config-volume\") pod \"4212359b-081e-4b11-8ca7-87cb9ff33a1c\" (UID: \"4212359b-081e-4b11-8ca7-87cb9ff33a1c\") " Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.159684 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:53 crc kubenswrapper[4631]: E1204 17:30:53.160014 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:53.660001756 +0000 UTC m=+183.692243754 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.161017 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4212359b-081e-4b11-8ca7-87cb9ff33a1c-config-volume" (OuterVolumeSpecName: "config-volume") pod "4212359b-081e-4b11-8ca7-87cb9ff33a1c" (UID: "4212359b-081e-4b11-8ca7-87cb9ff33a1c"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.165753 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4212359b-081e-4b11-8ca7-87cb9ff33a1c-kube-api-access-mhlqn" (OuterVolumeSpecName: "kube-api-access-mhlqn") pod "4212359b-081e-4b11-8ca7-87cb9ff33a1c" (UID: "4212359b-081e-4b11-8ca7-87cb9ff33a1c"). InnerVolumeSpecName "kube-api-access-mhlqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.172564 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4212359b-081e-4b11-8ca7-87cb9ff33a1c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4212359b-081e-4b11-8ca7-87cb9ff33a1c" (UID: "4212359b-081e-4b11-8ca7-87cb9ff33a1c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.172876 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bxvz7" Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.221323 4631 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.261119 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.261583 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mhlqn\" (UniqueName: \"kubernetes.io/projected/4212359b-081e-4b11-8ca7-87cb9ff33a1c-kube-api-access-mhlqn\") on node \"crc\" DevicePath \"\"" Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.261602 4631 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4212359b-081e-4b11-8ca7-87cb9ff33a1c-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.261617 4631 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4212359b-081e-4b11-8ca7-87cb9ff33a1c-config-volume\") on node \"crc\" DevicePath \"\"" Dec 04 17:30:53 crc kubenswrapper[4631]: E1204 17:30:53.261693 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:53.761675389 +0000 UTC m=+183.793917387 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.363217 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:53 crc kubenswrapper[4631]: E1204 17:30:53.365127 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:53.865113762 +0000 UTC m=+183.897355760 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.444323 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-54924"] Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.473934 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:53 crc kubenswrapper[4631]: E1204 17:30:53.474411 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:53.974396605 +0000 UTC m=+184.006638603 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.576251 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:53 crc kubenswrapper[4631]: E1204 17:30:53.576856 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:54.07682065 +0000 UTC m=+184.109062688 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.635966 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bxvz7"] Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.667243 4631 patch_prober.go:28] interesting pod/router-default-5444994796-glqcf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 04 17:30:53 crc kubenswrapper[4631]: [-]has-synced failed: reason withheld Dec 04 17:30:53 crc kubenswrapper[4631]: [+]process-running ok Dec 04 17:30:53 crc kubenswrapper[4631]: healthz check failed Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.667289 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bmh96" event={"ID":"e633bf80-04ad-4770-abc5-5d453077543c","Type":"ContainerStarted","Data":"cb0add41048cb2f19877f93ebb7bc819da3d6d4391abc58bde21eea66cb1daf7"} Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.667290 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glqcf" podUID="db00623b-8c6a-45d1-ab8b-a7e4f81f64eb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.673541 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nx6gn" event={"ID":"29451954-9708-42c3-87aa-e28d06a5c640","Type":"ContainerStarted","Data":"9847af89e7a30b4ca903117249fd363ad289b58e1a43c7667250a155dc427cc6"} Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.673582 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nx6gn" 
event={"ID":"29451954-9708-42c3-87aa-e28d06a5c640","Type":"ContainerStarted","Data":"bebb6a658d63134aee4940514501c0dfebde56a72886f6168d3185325aa9356c"} Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.675903 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"a9332857-9c9d-41da-a671-c246f0b14252","Type":"ContainerStarted","Data":"4c78d99000248dcd2e5b95e9f6ca09095fe66f1e850e99794afc258c3085caab"} Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.677913 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:53 crc kubenswrapper[4631]: E1204 17:30:53.678133 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:54.178116182 +0000 UTC m=+184.210358180 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.679615 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bvzck" event={"ID":"5622ec39-e11e-44c2-b059-47d6fc091328","Type":"ContainerStarted","Data":"f5f0f80630b8839681738701a9a1f37754e90835dcbf0d2c2f6bf46f13c1d8f4"} Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.681158 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l85pg" event={"ID":"5cb73c44-e995-4e73-9bd3-422c00633ddf","Type":"ContainerStarted","Data":"1b7c5e8d67d0de5a953d8f1de0e0c9f807b981693ecf440796bedaeea987a8e9"} Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.691351 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs" Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.691992 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs" event={"ID":"4212359b-081e-4b11-8ca7-87cb9ff33a1c","Type":"ContainerDied","Data":"73f8d8eb60dc9d5f74cb293a3e040663555483259a51a1c481ed5760768e399b"} Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.692035 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="73f8d8eb60dc9d5f74cb293a3e040663555483259a51a1c481ed5760768e399b" Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.695274 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mdxjh" event={"ID":"200bbd12-8133-46c5-a69f-ed2dd0b5e191","Type":"ContainerStarted","Data":"0447479c9af33412c667060d6782705f17e2e2700385f61cad39c01d269bfdae"} Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.697085 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-54924" event={"ID":"2ff644ad-bdd2-4306-918d-fe5a0bfbc964","Type":"ContainerStarted","Data":"7724f8fc774de1812a1a442c164c257dc5ce42c5fdaf17eed20f65fe2f466b2f"} Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.708410 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bxvz7" event={"ID":"7c6c775a-ae4c-4682-97fd-5f9e4457f8fd","Type":"ContainerStarted","Data":"479421fb35d9c0198e7990411b9f8af223055d019b9ca1f1e9f2b32521560e69"} Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.714972 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" event={"ID":"30620276-02bb-4cea-a50e-36fc7d4689ae","Type":"ContainerStarted","Data":"e37228196c19db8d263315120da65de86512c36966e78029c700e0e2850baa27"} Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.730619 4631 generic.go:334] "Generic (PLEG): container finished" podID="885a0356-3029-482f-b3b5-3caa01e19c62" containerID="4df710cbb8fbf14e427d45ac8f9f634470cfe5209e7221d71fde297eb2064ec6" exitCode=0 Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.730988 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9bhz" event={"ID":"885a0356-3029-482f-b3b5-3caa01e19c62","Type":"ContainerDied","Data":"4df710cbb8fbf14e427d45ac8f9f634470cfe5209e7221d71fde297eb2064ec6"} Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.738475 4631 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.739976 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dr5fh" event={"ID":"63aa12c5-8868-471f-929f-df3697cc07ae","Type":"ContainerStarted","Data":"633189f10e04b150dc60946f6981efcd624c47566b3589d6ec327dc85c3c64c2"} Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.740023 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dr5fh" event={"ID":"63aa12c5-8868-471f-929f-df3697cc07ae","Type":"ContainerStarted","Data":"cd539170e1bcba7b73a8c696984b5b61ed390088e90bec54b8bf572f2ad440ae"} Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.779851 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:53 crc kubenswrapper[4631]: E1204 17:30:53.780226 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:54.280215557 +0000 UTC m=+184.312457545 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.881638 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:53 crc kubenswrapper[4631]: E1204 17:30:53.882449 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-04 17:30:54.382420856 +0000 UTC m=+184.414662854 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:53 crc kubenswrapper[4631]: I1204 17:30:53.983930 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:53 crc kubenswrapper[4631]: E1204 17:30:53.984290 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-04 17:30:54.484278044 +0000 UTC m=+184.516520042 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m8hln" (UID: "52770a8d-d215-4fa8-8469-95a315e44850") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.031932 4631 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-04T17:30:53.221347745Z","Handler":null,"Name":""} Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.053165 4631 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.053218 4631 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.084890 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.104137 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.186472 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.240887 4631 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.240947 4631 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.247298 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.250440 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 04 17:30:54 crc kubenswrapper[4631]: E1204 17:30:54.250757 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4212359b-081e-4b11-8ca7-87cb9ff33a1c" containerName="collect-profiles"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.250786 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="4212359b-081e-4b11-8ca7-87cb9ff33a1c" containerName="collect-profiles"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.250898 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="4212359b-081e-4b11-8ca7-87cb9ff33a1c" containerName="collect-profiles"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.251299 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.253661 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.253930 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.262221 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.262608 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.262609 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.388543 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a1e040b1-dd65-4400-a15d-b0ebd0715474-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"a1e040b1-dd65-4400-a15d-b0ebd0715474\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.388687 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a1e040b1-dd65-4400-a15d-b0ebd0715474-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"a1e040b1-dd65-4400-a15d-b0ebd0715474\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.444473 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m8hln\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.459940 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.489770 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a1e040b1-dd65-4400-a15d-b0ebd0715474-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"a1e040b1-dd65-4400-a15d-b0ebd0715474\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.489878 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a1e040b1-dd65-4400-a15d-b0ebd0715474-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"a1e040b1-dd65-4400-a15d-b0ebd0715474\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.489954 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a1e040b1-dd65-4400-a15d-b0ebd0715474-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"a1e040b1-dd65-4400-a15d-b0ebd0715474\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.517455 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a1e040b1-dd65-4400-a15d-b0ebd0715474-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"a1e040b1-dd65-4400-a15d-b0ebd0715474\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.566976 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.622870 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.631557 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.666753 4631 patch_prober.go:28] interesting pod/router-default-5444994796-glqcf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 04 17:30:54 crc kubenswrapper[4631]: [-]has-synced failed: reason withheld
Dec 04 17:30:54 crc kubenswrapper[4631]: [+]process-running ok
Dec 04 17:30:54 crc kubenswrapper[4631]: healthz check failed
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.667028 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glqcf" podUID="db00623b-8c6a-45d1-ab8b-a7e4f81f64eb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.755832 4631 generic.go:334] "Generic (PLEG): container finished" podID="5cb73c44-e995-4e73-9bd3-422c00633ddf" containerID="1b7c5e8d67d0de5a953d8f1de0e0c9f807b981693ecf440796bedaeea987a8e9" exitCode=0
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.755903 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l85pg" event={"ID":"5cb73c44-e995-4e73-9bd3-422c00633ddf","Type":"ContainerDied","Data":"1b7c5e8d67d0de5a953d8f1de0e0c9f807b981693ecf440796bedaeea987a8e9"}
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.757999 4631 generic.go:334] "Generic (PLEG): container finished" podID="e633bf80-04ad-4770-abc5-5d453077543c" containerID="cb0add41048cb2f19877f93ebb7bc819da3d6d4391abc58bde21eea66cb1daf7" exitCode=0
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.758034 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bmh96" event={"ID":"e633bf80-04ad-4770-abc5-5d453077543c","Type":"ContainerDied","Data":"cb0add41048cb2f19877f93ebb7bc819da3d6d4391abc58bde21eea66cb1daf7"}
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.759700 4631 generic.go:334] "Generic (PLEG): container finished" podID="29451954-9708-42c3-87aa-e28d06a5c640" containerID="9847af89e7a30b4ca903117249fd363ad289b58e1a43c7667250a155dc427cc6" exitCode=0
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.759735 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nx6gn" event={"ID":"29451954-9708-42c3-87aa-e28d06a5c640","Type":"ContainerDied","Data":"9847af89e7a30b4ca903117249fd363ad289b58e1a43c7667250a155dc427cc6"}
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.762198 4631 generic.go:334] "Generic (PLEG): container finished" podID="5622ec39-e11e-44c2-b059-47d6fc091328" containerID="f5f0f80630b8839681738701a9a1f37754e90835dcbf0d2c2f6bf46f13c1d8f4" exitCode=0
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.762235 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bvzck" event={"ID":"5622ec39-e11e-44c2-b059-47d6fc091328","Type":"ContainerDied","Data":"f5f0f80630b8839681738701a9a1f37754e90835dcbf0d2c2f6bf46f13c1d8f4"}
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.766019 4631 generic.go:334] "Generic (PLEG): container finished" podID="63aa12c5-8868-471f-929f-df3697cc07ae" containerID="633189f10e04b150dc60946f6981efcd624c47566b3589d6ec327dc85c3c64c2" exitCode=0
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.766396 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dr5fh" event={"ID":"63aa12c5-8868-471f-929f-df3697cc07ae","Type":"ContainerDied","Data":"633189f10e04b150dc60946f6981efcd624c47566b3589d6ec327dc85c3c64c2"}
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.776591 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jn87j"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.857644 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-hc4kn" podStartSLOduration=157.8576244 podStartE2EDuration="2m37.8576244s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:30:54.855932981 +0000 UTC m=+184.888174989" watchObservedRunningTime="2025-12-04 17:30:54.8576244 +0000 UTC m=+184.889866388"
Dec 04 17:30:54 crc kubenswrapper[4631]: I1204 17:30:54.879050 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 04 17:30:55 crc kubenswrapper[4631]: I1204 17:31:00.008328 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m8hln"]
Dec 04 17:30:55 crc kubenswrapper[4631]: I1204 17:30:55.666397 4631 patch_prober.go:28] interesting pod/router-default-5444994796-glqcf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 04 17:30:55 crc kubenswrapper[4631]: [-]has-synced failed: reason withheld
Dec 04 17:30:55 crc kubenswrapper[4631]: [+]process-running ok
Dec 04 17:30:55 crc kubenswrapper[4631]: healthz check failed
Dec 04 17:30:55 crc kubenswrapper[4631]: I1204 17:30:55.666791 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glqcf" podUID="db00623b-8c6a-45d1-ab8b-a7e4f81f64eb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 04 17:30:55 crc kubenswrapper[4631]: I1204 17:30:55.772629 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a1e040b1-dd65-4400-a15d-b0ebd0715474","Type":"ContainerStarted","Data":"aaf19953a1b834ec8de77c1152833fa9e31b57d0d78fb5111ec5db0cd81f357a"}
Dec 04 17:30:55 crc kubenswrapper[4631]: I1204 17:30:55.774467 4631 generic.go:334] "Generic (PLEG): container finished" podID="2ff644ad-bdd2-4306-918d-fe5a0bfbc964" containerID="634a43bc8db0d8833b2ebf3cc0ca18082041fb28a19e9d3b37ac33845abdf675" exitCode=0
Dec 04 17:30:55 crc kubenswrapper[4631]: I1204 17:30:55.774533 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-54924" event={"ID":"2ff644ad-bdd2-4306-918d-fe5a0bfbc964","Type":"ContainerDied","Data":"634a43bc8db0d8833b2ebf3cc0ca18082041fb28a19e9d3b37ac33845abdf675"}
Dec 04 17:30:55 crc kubenswrapper[4631]: I1204 17:30:55.778326 4631 generic.go:334] "Generic (PLEG): container finished" podID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" containerID="a8c8c6cc3f883c80dbff5171aeb66f2aef48a8e2faf890d8e222831c1a37f736" exitCode=0
Dec 04 17:30:55 crc kubenswrapper[4631]: I1204 17:30:55.778413 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bxvz7" event={"ID":"7c6c775a-ae4c-4682-97fd-5f9e4457f8fd","Type":"ContainerDied","Data":"a8c8c6cc3f883c80dbff5171aeb66f2aef48a8e2faf890d8e222831c1a37f736"}
Dec 04 17:30:55 crc kubenswrapper[4631]: I1204 17:30:55.780260 4631 generic.go:334] "Generic (PLEG): container finished" podID="a9332857-9c9d-41da-a671-c246f0b14252" containerID="4c78d99000248dcd2e5b95e9f6ca09095fe66f1e850e99794afc258c3085caab" exitCode=0
Dec 04 17:30:55 crc kubenswrapper[4631]: I1204 17:30:55.780341 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"a9332857-9c9d-41da-a671-c246f0b14252","Type":"ContainerDied","Data":"4c78d99000248dcd2e5b95e9f6ca09095fe66f1e850e99794afc258c3085caab"}
Dec 04 17:30:55 crc kubenswrapper[4631]: I1204 17:30:55.782356 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" event={"ID":"52770a8d-d215-4fa8-8469-95a315e44850","Type":"ContainerStarted","Data":"25a06f8489dc0101d6da727452f3797101e1ea34a5811233e19b87887117adbd"}
Dec 04 17:30:56 crc kubenswrapper[4631]: I1204 17:30:56.667326 4631 patch_prober.go:28] interesting pod/router-default-5444994796-glqcf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 04 17:30:56 crc kubenswrapper[4631]: [-]has-synced failed: reason withheld
Dec 04 17:30:56 crc kubenswrapper[4631]: [+]process-running ok
Dec 04 17:30:56 crc kubenswrapper[4631]: healthz check failed
Dec 04 17:30:56 crc kubenswrapper[4631]: I1204 17:30:56.667838 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glqcf" podUID="db00623b-8c6a-45d1-ab8b-a7e4f81f64eb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 04 17:30:56 crc kubenswrapper[4631]: I1204 17:30:56.789043 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" event={"ID":"52770a8d-d215-4fa8-8469-95a315e44850","Type":"ContainerStarted","Data":"0096b18d52a63440b7ba79eb2e870703ec2d2cacb727666a50405b0362a80bdf"}
Dec 04 17:30:56 crc kubenswrapper[4631]: I1204 17:30:56.973934 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 04 17:30:57 crc kubenswrapper[4631]: I1204 17:30:57.135329 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a9332857-9c9d-41da-a671-c246f0b14252-kubelet-dir\") pod \"a9332857-9c9d-41da-a671-c246f0b14252\" (UID: \"a9332857-9c9d-41da-a671-c246f0b14252\") "
Dec 04 17:30:57 crc kubenswrapper[4631]: I1204 17:30:57.135453 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a9332857-9c9d-41da-a671-c246f0b14252-kube-api-access\") pod \"a9332857-9c9d-41da-a671-c246f0b14252\" (UID: \"a9332857-9c9d-41da-a671-c246f0b14252\") "
Dec 04 17:30:57 crc kubenswrapper[4631]: I1204 17:30:57.139486 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a9332857-9c9d-41da-a671-c246f0b14252-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "a9332857-9c9d-41da-a671-c246f0b14252" (UID: "a9332857-9c9d-41da-a671-c246f0b14252"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 17:30:57 crc kubenswrapper[4631]: I1204 17:30:57.159630 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9332857-9c9d-41da-a671-c246f0b14252-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "a9332857-9c9d-41da-a671-c246f0b14252" (UID: "a9332857-9c9d-41da-a671-c246f0b14252"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 17:30:57 crc kubenswrapper[4631]: I1204 17:30:57.236514 4631 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a9332857-9c9d-41da-a671-c246f0b14252-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 04 17:30:57 crc kubenswrapper[4631]: I1204 17:30:57.236543 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a9332857-9c9d-41da-a671-c246f0b14252-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 04 17:30:57 crc kubenswrapper[4631]: I1204 17:30:57.667067 4631 patch_prober.go:28] interesting pod/router-default-5444994796-glqcf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 04 17:30:57 crc kubenswrapper[4631]: [-]has-synced failed: reason withheld
Dec 04 17:30:57 crc kubenswrapper[4631]: [+]process-running ok
Dec 04 17:30:57 crc kubenswrapper[4631]: healthz check failed
Dec 04 17:30:57 crc kubenswrapper[4631]: I1204 17:30:57.667489 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glqcf" podUID="db00623b-8c6a-45d1-ab8b-a7e4f81f64eb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 04 17:30:57 crc kubenswrapper[4631]: I1204 17:30:57.802577 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a1e040b1-dd65-4400-a15d-b0ebd0715474","Type":"ContainerStarted","Data":"0821636ee12f2888c714bc2a6675929300f6f43fcf994ae999e8f694ce4a857e"}
Dec 04 17:30:57 crc kubenswrapper[4631]: I1204 17:30:57.804630 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"a9332857-9c9d-41da-a671-c246f0b14252","Type":"ContainerDied","Data":"27ad7a84df7e8f67fee38b969b746a853fd9eb10600780478911bbe3e1935352"}
Dec 04 17:30:57 crc kubenswrapper[4631]: I1204 17:30:57.804676 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27ad7a84df7e8f67fee38b969b746a853fd9eb10600780478911bbe3e1935352"
Dec 04 17:30:57 crc kubenswrapper[4631]: I1204 17:30:57.804736 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 04 17:30:57 crc kubenswrapper[4631]: I1204 17:30:57.818166 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mdxjh" event={"ID":"200bbd12-8133-46c5-a69f-ed2dd0b5e191","Type":"ContainerStarted","Data":"7ce4d75f520389a80f5e23b5dc1b2dfe604ed23ecfa10e3b2c5b1d27cfcfc7e9"}
Dec 04 17:30:57 crc kubenswrapper[4631]: I1204 17:30:57.833314 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-h5fb2"
Dec 04 17:30:58 crc kubenswrapper[4631]: I1204 17:30:58.471590 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-g65qn"
Dec 04 17:30:58 crc kubenswrapper[4631]: I1204 17:30:58.479653 4631 patch_prober.go:28] interesting pod/console-f9d7485db-kl45g container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.40:8443/health\": dial tcp 10.217.0.40:8443: connect: connection refused" start-of-body=
Dec 04 17:30:58 crc kubenswrapper[4631]: I1204 17:30:58.479717 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-kl45g" podUID="81559ff3-95e9-455f-9d90-46c5f1a981ce" containerName="console" probeResult="failure" output="Get \"https://10.217.0.40:8443/health\": dial tcp 10.217.0.40:8443: connect: connection refused"
Dec 04 17:30:58 crc kubenswrapper[4631]: I1204 17:30:58.643826 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-hc4kn"
Dec 04 17:30:58 crc kubenswrapper[4631]: I1204 17:30:58.643894 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-hc4kn"
Dec 04 17:30:58 crc kubenswrapper[4631]: I1204 17:30:58.649694 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-hc4kn"
Dec 04 17:30:58 crc kubenswrapper[4631]: I1204 17:30:58.665922 4631 patch_prober.go:28] interesting pod/router-default-5444994796-glqcf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 04 17:30:58 crc kubenswrapper[4631]: [-]has-synced failed: reason withheld
Dec 04 17:30:58 crc kubenswrapper[4631]: [+]process-running ok
Dec 04 17:30:58 crc kubenswrapper[4631]: healthz check failed
Dec 04 17:30:58 crc kubenswrapper[4631]: I1204 17:30:58.665985 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glqcf" podUID="db00623b-8c6a-45d1-ab8b-a7e4f81f64eb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 04 17:30:58 crc kubenswrapper[4631]: I1204 17:30:58.831361 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-hc4kn"
Dec 04 17:30:59 crc kubenswrapper[4631]: I1204 17:30:59.666650 4631 patch_prober.go:28] interesting pod/router-default-5444994796-glqcf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 04 17:30:59 crc kubenswrapper[4631]: [-]has-synced failed: reason withheld
Dec 04 17:30:59 crc kubenswrapper[4631]: [+]process-running ok
Dec 04 17:30:59 crc kubenswrapper[4631]: healthz check failed
Dec 04 17:30:59 crc kubenswrapper[4631]: I1204 17:30:59.666704 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glqcf" podUID="db00623b-8c6a-45d1-ab8b-a7e4f81f64eb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 04 17:30:59 crc kubenswrapper[4631]: I1204 17:30:59.818525 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-d97wj"
Dec 04 17:31:00 crc kubenswrapper[4631]: I1204 17:31:00.665381 4631 patch_prober.go:28] interesting pod/router-default-5444994796-glqcf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 04 17:31:00 crc kubenswrapper[4631]: [-]has-synced failed: reason withheld
Dec 04 17:31:00 crc kubenswrapper[4631]: [+]process-running ok
Dec 04 17:31:00 crc kubenswrapper[4631]: healthz check failed
Dec 04 17:31:00 crc kubenswrapper[4631]: I1204 17:31:00.665437 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glqcf" podUID="db00623b-8c6a-45d1-ab8b-a7e4f81f64eb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 04 17:31:01 crc kubenswrapper[4631]: I1204 17:31:01.666197 4631 patch_prober.go:28] interesting pod/router-default-5444994796-glqcf container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 04 17:31:01 crc kubenswrapper[4631]: [-]has-synced failed: reason withheld
Dec 04 17:31:01 crc kubenswrapper[4631]: [+]process-running ok
Dec 04 17:31:01 crc kubenswrapper[4631]: healthz check failed
Dec 04 17:31:01 crc kubenswrapper[4631]: I1204 17:31:01.666257 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-glqcf" podUID="db00623b-8c6a-45d1-ab8b-a7e4f81f64eb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 04 17:31:02 crc kubenswrapper[4631]: I1204 17:31:02.669143 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-glqcf"
Dec 04 17:31:02 crc kubenswrapper[4631]: I1204 17:31:02.674267 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-glqcf"
Dec 04 17:31:04 crc kubenswrapper[4631]: I1204 17:31:04.899907 4631 generic.go:334] "Generic (PLEG): container finished" podID="a1e040b1-dd65-4400-a15d-b0ebd0715474" containerID="0821636ee12f2888c714bc2a6675929300f6f43fcf994ae999e8f694ce4a857e" exitCode=0
Dec 04 17:31:04 crc kubenswrapper[4631]: I1204 17:31:04.901203 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a1e040b1-dd65-4400-a15d-b0ebd0715474","Type":"ContainerDied","Data":"0821636ee12f2888c714bc2a6675929300f6f43fcf994ae999e8f694ce4a857e"}
Dec 04 17:31:04 crc kubenswrapper[4631]: I1204 17:31:04.901937 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:31:04 crc kubenswrapper[4631]: I1204 17:31:04.960576 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" podStartSLOduration=167.960554851 podStartE2EDuration="2m47.960554851s" podCreationTimestamp="2025-12-04 17:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:31:04.959666325 +0000 UTC m=+194.991908333" watchObservedRunningTime="2025-12-04 17:31:04.960554851 +0000 UTC m=+194.992796859"
Dec 04 17:31:04 crc kubenswrapper[4631]: I1204 17:31:04.962788 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-mdxjh" podStartSLOduration=28.962756675 podStartE2EDuration="28.962756675s" podCreationTimestamp="2025-12-04 17:30:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:31:04.934597932 +0000 UTC m=+194.966839940" watchObservedRunningTime="2025-12-04 17:31:04.962756675 +0000 UTC m=+194.994998673"
Dec 04 17:31:06 crc kubenswrapper[4631]: I1204 17:31:06.023325 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 17:31:06 crc kubenswrapper[4631]: I1204 17:31:06.023443 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 17:31:08 crc kubenswrapper[4631]: I1204 17:31:08.482937 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-kl45g"
Dec 04 17:31:08 crc kubenswrapper[4631]: I1204 17:31:08.488001 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-kl45g"
Dec 04 17:31:14 crc kubenswrapper[4631]: I1204 17:31:14.800738 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-m8hln"
Dec 04 17:31:19 crc kubenswrapper[4631]: I1204 17:31:19.417452 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kkczr"
Dec 04 17:31:28 crc kubenswrapper[4631]: I1204 17:31:28.015504 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 04 17:31:28 crc kubenswrapper[4631]: I1204 17:31:28.066667 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a1e040b1-dd65-4400-a15d-b0ebd0715474","Type":"ContainerDied","Data":"aaf19953a1b834ec8de77c1152833fa9e31b57d0d78fb5111ec5db0cd81f357a"}
Dec 04 17:31:28 crc kubenswrapper[4631]: I1204 17:31:28.066709 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aaf19953a1b834ec8de77c1152833fa9e31b57d0d78fb5111ec5db0cd81f357a"
Dec 04 17:31:28 crc kubenswrapper[4631]: I1204 17:31:28.066764 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 04 17:31:28 crc kubenswrapper[4631]: I1204 17:31:28.149021 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a1e040b1-dd65-4400-a15d-b0ebd0715474-kube-api-access\") pod \"a1e040b1-dd65-4400-a15d-b0ebd0715474\" (UID: \"a1e040b1-dd65-4400-a15d-b0ebd0715474\") "
Dec 04 17:31:28 crc kubenswrapper[4631]: I1204 17:31:28.149207 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a1e040b1-dd65-4400-a15d-b0ebd0715474-kubelet-dir\") pod \"a1e040b1-dd65-4400-a15d-b0ebd0715474\" (UID: \"a1e040b1-dd65-4400-a15d-b0ebd0715474\") "
Dec 04 17:31:28 crc kubenswrapper[4631]: I1204 17:31:28.149427 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a1e040b1-dd65-4400-a15d-b0ebd0715474-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "a1e040b1-dd65-4400-a15d-b0ebd0715474" (UID: "a1e040b1-dd65-4400-a15d-b0ebd0715474"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 17:31:28 crc kubenswrapper[4631]: I1204 17:31:28.169127 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1e040b1-dd65-4400-a15d-b0ebd0715474-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "a1e040b1-dd65-4400-a15d-b0ebd0715474" (UID: "a1e040b1-dd65-4400-a15d-b0ebd0715474"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 17:31:28 crc kubenswrapper[4631]: I1204 17:31:28.250479 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a1e040b1-dd65-4400-a15d-b0ebd0715474-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 04 17:31:28 crc kubenswrapper[4631]: I1204 17:31:28.250516 4631 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a1e040b1-dd65-4400-a15d-b0ebd0715474-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 04 17:31:33 crc kubenswrapper[4631]: I1204 17:31:33.642656 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 04 17:31:33 crc kubenswrapper[4631]: E1204 17:31:33.643685 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1e040b1-dd65-4400-a15d-b0ebd0715474" containerName="pruner"
Dec 04 17:31:33 crc kubenswrapper[4631]: I1204 17:31:33.643698 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1e040b1-dd65-4400-a15d-b0ebd0715474" containerName="pruner"
Dec 04 17:31:33 crc kubenswrapper[4631]: E1204 17:31:33.643724 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9332857-9c9d-41da-a671-c246f0b14252" containerName="pruner"
Dec 04 17:31:33 crc kubenswrapper[4631]: I1204 17:31:33.643732 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9332857-9c9d-41da-a671-c246f0b14252" containerName="pruner"
Dec 04 17:31:33 crc kubenswrapper[4631]: I1204 17:31:33.643836 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9332857-9c9d-41da-a671-c246f0b14252" containerName="pruner"
Dec 04 17:31:33 crc kubenswrapper[4631]: I1204 17:31:33.643848 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1e040b1-dd65-4400-a15d-b0ebd0715474" containerName="pruner"
Dec 04 17:31:33 crc kubenswrapper[4631]: I1204 17:31:33.644291 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 04 17:31:33 crc kubenswrapper[4631]: I1204 17:31:33.646029 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Dec 04 17:31:33 crc kubenswrapper[4631]: I1204 17:31:33.649489 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Dec 04 17:31:33 crc kubenswrapper[4631]: I1204 17:31:33.658144 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 04 17:31:33 crc kubenswrapper[4631]: I1204 17:31:33.739951 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ed2489f4-3f20-4fac-bfcd-ba93cf109860-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ed2489f4-3f20-4fac-bfcd-ba93cf109860\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 04 17:31:33 crc kubenswrapper[4631]: I1204 17:31:33.740540 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ed2489f4-3f20-4fac-bfcd-ba93cf109860-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ed2489f4-3f20-4fac-bfcd-ba93cf109860\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 04 17:31:33 crc kubenswrapper[4631]: I1204 17:31:33.842228 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ed2489f4-3f20-4fac-bfcd-ba93cf109860-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ed2489f4-3f20-4fac-bfcd-ba93cf109860\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 04 17:31:33 crc kubenswrapper[4631]: I1204 17:31:33.842292 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ed2489f4-3f20-4fac-bfcd-ba93cf109860-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ed2489f4-3f20-4fac-bfcd-ba93cf109860\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 04 17:31:33 crc kubenswrapper[4631]: I1204 17:31:33.842393 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ed2489f4-3f20-4fac-bfcd-ba93cf109860-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ed2489f4-3f20-4fac-bfcd-ba93cf109860\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 04 17:31:33 crc kubenswrapper[4631]: I1204 17:31:33.865120 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ed2489f4-3f20-4fac-bfcd-ba93cf109860-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ed2489f4-3f20-4fac-bfcd-ba93cf109860\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 04 17:31:33 crc kubenswrapper[4631]: I1204 17:31:33.984419 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 04 17:31:36 crc kubenswrapper[4631]: I1204 17:31:36.023442 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 17:31:36 crc kubenswrapper[4631]: I1204 17:31:36.024809 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 17:31:36 crc kubenswrapper[4631]: I1204 17:31:36.024967 4631 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q27wh"
Dec 04 17:31:36 crc kubenswrapper[4631]: I1204 17:31:36.026161 4631 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd"} pod="openshift-machine-config-operator/machine-config-daemon-q27wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 04 17:31:36 crc kubenswrapper[4631]: I1204 17:31:36.026399 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" containerID="cri-o://8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd" gracePeriod=600
Dec 04 17:31:38 crc kubenswrapper[4631]: I1204 17:31:38.440625 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 04 17:31:38 crc kubenswrapper[4631]: I1204 17:31:38.441833 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 04 17:31:38 crc kubenswrapper[4631]: I1204 17:31:38.456157 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 04 17:31:38 crc kubenswrapper[4631]: I1204 17:31:38.512777 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8fc9fdca-bb86-41d3-a800-2996fdeea0fd-var-lock\") pod \"installer-9-crc\" (UID: \"8fc9fdca-bb86-41d3-a800-2996fdeea0fd\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 04 17:31:38 crc kubenswrapper[4631]: I1204 17:31:38.513212 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8fc9fdca-bb86-41d3-a800-2996fdeea0fd-kubelet-dir\") pod \"installer-9-crc\" (UID: \"8fc9fdca-bb86-41d3-a800-2996fdeea0fd\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 04 17:31:38 crc kubenswrapper[4631]: I1204 17:31:38.513333 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8fc9fdca-bb86-41d3-a800-2996fdeea0fd-kube-api-access\") pod \"installer-9-crc\" (UID: \"8fc9fdca-bb86-41d3-a800-2996fdeea0fd\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 04 17:31:38 crc kubenswrapper[4631]: I1204 17:31:38.614466 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8fc9fdca-bb86-41d3-a800-2996fdeea0fd-kubelet-dir\") pod \"installer-9-crc\" (UID: \"8fc9fdca-bb86-41d3-a800-2996fdeea0fd\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 04 17:31:38 crc kubenswrapper[4631]: I1204 17:31:38.614513 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8fc9fdca-bb86-41d3-a800-2996fdeea0fd-kube-api-access\") pod \"installer-9-crc\" (UID: \"8fc9fdca-bb86-41d3-a800-2996fdeea0fd\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 04 17:31:38 crc kubenswrapper[4631]: I1204 17:31:38.614556 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8fc9fdca-bb86-41d3-a800-2996fdeea0fd-kubelet-dir\") pod \"installer-9-crc\" (UID: \"8fc9fdca-bb86-41d3-a800-2996fdeea0fd\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 04 17:31:38 crc kubenswrapper[4631]: I1204 17:31:38.614593 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8fc9fdca-bb86-41d3-a800-2996fdeea0fd-var-lock\") pod \"installer-9-crc\" (UID: \"8fc9fdca-bb86-41d3-a800-2996fdeea0fd\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 04 17:31:38 crc kubenswrapper[4631]: I1204 17:31:38.614568 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8fc9fdca-bb86-41d3-a800-2996fdeea0fd-var-lock\") pod \"installer-9-crc\" (UID: \"8fc9fdca-bb86-41d3-a800-2996fdeea0fd\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 04 17:31:38 crc kubenswrapper[4631]: I1204 17:31:38.639017 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8fc9fdca-bb86-41d3-a800-2996fdeea0fd-kube-api-access\") pod \"installer-9-crc\" (UID: \"8fc9fdca-bb86-41d3-a800-2996fdeea0fd\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 04 17:31:38 crc kubenswrapper[4631]: I1204 17:31:38.758582 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 04 17:31:48 crc kubenswrapper[4631]: I1204 17:31:48.161501 4631 generic.go:334] "Generic (PLEG): container finished" podID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerID="8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd" exitCode=0
Dec 04 17:31:48 crc kubenswrapper[4631]: I1204 17:31:48.161567 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerDied","Data":"8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd"}
Dec 04 17:31:52 crc kubenswrapper[4631]: E1204 17:31:52.470472 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Dec 04 17:31:52 crc kubenswrapper[4631]: E1204 17:31:52.471544 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fjlwh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-dr5fh_openshift-marketplace(63aa12c5-8868-471f-929f-df3697cc07ae): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 04 17:31:52 crc kubenswrapper[4631]: E1204 17:31:52.472892 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-dr5fh" podUID="63aa12c5-8868-471f-929f-df3697cc07ae"
Dec 04 17:31:54 crc kubenswrapper[4631]: E1204 17:31:54.430052 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:ab0759cefd736e0c47c96de4c211a5787496e1bc417696f0f882bd26c59b72b8: Get \"https://registry.redhat.io/v2/redhat/redhat-operator-index/blobs/sha256:ab0759cefd736e0c47c96de4c211a5787496e1bc417696f0f882bd26c59b72b8\": context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Dec 04 17:31:54 crc kubenswrapper[4631]: E1204 17:31:54.431512 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-m6jrn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-54924_openshift-marketplace(2ff644ad-bdd2-4306-918d-fe5a0bfbc964): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:ab0759cefd736e0c47c96de4c211a5787496e1bc417696f0f882bd26c59b72b8: Get \"https://registry.redhat.io/v2/redhat/redhat-operator-index/blobs/sha256:ab0759cefd736e0c47c96de4c211a5787496e1bc417696f0f882bd26c59b72b8\": context canceled" logger="UnhandledError"
Dec 04 17:31:54 crc kubenswrapper[4631]: E1204 17:31:54.433605 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:ab0759cefd736e0c47c96de4c211a5787496e1bc417696f0f882bd26c59b72b8: Get \\\"https://registry.redhat.io/v2/redhat/redhat-operator-index/blobs/sha256:ab0759cefd736e0c47c96de4c211a5787496e1bc417696f0f882bd26c59b72b8\\\": context canceled\"" pod="openshift-marketplace/redhat-operators-54924" podUID="2ff644ad-bdd2-4306-918d-fe5a0bfbc964"
Dec 04 17:31:54 crc kubenswrapper[4631]: E1204 17:31:54.496195 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Dec 04 17:31:54 crc kubenswrapper[4631]: E1204 17:31:54.496481 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hcr6b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-bvzck_openshift-marketplace(5622ec39-e11e-44c2-b059-47d6fc091328): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 04 17:31:54 crc kubenswrapper[4631]: E1204 17:31:54.497592 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-bvzck" podUID="5622ec39-e11e-44c2-b059-47d6fc091328"
Dec 04 17:31:54 crc kubenswrapper[4631]: E1204 17:31:54.497591 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Dec 04 17:31:54 crc kubenswrapper[4631]: E1204 17:31:54.497704 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b7gkq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-nx6gn_openshift-marketplace(29451954-9708-42c3-87aa-e28d06a5c640): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 04 17:31:54 crc kubenswrapper[4631]: E1204 17:31:54.499110 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-nx6gn" podUID="29451954-9708-42c3-87aa-e28d06a5c640"
Dec 04 17:31:56 crc kubenswrapper[4631]: E1204 17:31:56.270803 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-bvzck" podUID="5622ec39-e11e-44c2-b059-47d6fc091328"
Dec 04 17:31:56 crc kubenswrapper[4631]: E1204 17:31:56.352616 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Dec 04 17:31:56 crc kubenswrapper[4631]: E1204 17:31:56.352983 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pbc2n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-n9bhz_openshift-marketplace(885a0356-3029-482f-b3b5-3caa01e19c62): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 04 17:31:56 crc kubenswrapper[4631]: E1204 17:31:56.353773 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Dec 04 17:31:56 crc kubenswrapper[4631]: E1204 17:31:56.353857 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-74nx8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
certified-operators-l85pg_openshift-marketplace(5cb73c44-e995-4e73-9bd3-422c00633ddf): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 04 17:31:56 crc kubenswrapper[4631]: E1204 17:31:56.354960 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-n9bhz" podUID="885a0356-3029-482f-b3b5-3caa01e19c62" Dec 04 17:31:56 crc kubenswrapper[4631]: E1204 17:31:56.355078 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-l85pg" podUID="5cb73c44-e995-4e73-9bd3-422c00633ddf" Dec 04 17:31:56 crc kubenswrapper[4631]: E1204 17:31:56.380024 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 04 17:31:56 crc kubenswrapper[4631]: E1204 17:31:56.380264 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-snnrg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-bmh96_openshift-marketplace(e633bf80-04ad-4770-abc5-5d453077543c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 04 17:31:56 crc kubenswrapper[4631]: E1204 17:31:56.382416 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying 
config: context canceled\"" pod="openshift-marketplace/certified-operators-bmh96" podUID="e633bf80-04ad-4770-abc5-5d453077543c" Dec 04 17:31:59 crc kubenswrapper[4631]: E1204 17:31:59.968118 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-n9bhz" podUID="885a0356-3029-482f-b3b5-3caa01e19c62" Dec 04 17:31:59 crc kubenswrapper[4631]: E1204 17:31:59.990919 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 04 17:31:59 crc kubenswrapper[4631]: E1204 17:31:59.991874 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8kcm9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-bxvz7_openshift-marketplace(7c6c775a-ae4c-4682-97fd-5f9e4457f8fd): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 04 17:31:59 crc kubenswrapper[4631]: E1204 17:31:59.993112 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-bxvz7" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" Dec 04 17:32:00 crc kubenswrapper[4631]: I1204 17:32:00.247553 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerStarted","Data":"609d239612b670bcf642d521340a1cbeabb8e40268181c9747fe492989b6287c"} Dec 04 17:32:00 crc kubenswrapper[4631]: I1204 17:32:00.417017 4631 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 04 17:32:00 crc kubenswrapper[4631]: I1204 17:32:00.477724 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 04 17:32:00 crc kubenswrapper[4631]: W1204 17:32:00.491757 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-poded2489f4_3f20_4fac_bfcd_ba93cf109860.slice/crio-41e189519f02570286d32f8131da26d63a9d5d1c968aeac949d98ef353555bf6 WatchSource:0}: Error finding container 41e189519f02570286d32f8131da26d63a9d5d1c968aeac949d98ef353555bf6: Status 404 returned error can't find the container with id 41e189519f02570286d32f8131da26d63a9d5d1c968aeac949d98ef353555bf6 Dec 04 17:32:01 crc kubenswrapper[4631]: I1204 17:32:01.246506 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8fc9fdca-bb86-41d3-a800-2996fdeea0fd","Type":"ContainerStarted","Data":"c8febd5b53f3bd77cf7bc8ff282eb5f25fc8ccb617017a7420ca00670fcde94c"} Dec 04 17:32:01 crc kubenswrapper[4631]: I1204 17:32:01.246951 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8fc9fdca-bb86-41d3-a800-2996fdeea0fd","Type":"ContainerStarted","Data":"bb89800d381992364bb060cdb6837a400797f88899febaba1be5be796e3f5bbc"} Dec 04 17:32:01 crc kubenswrapper[4631]: I1204 17:32:01.253471 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"ed2489f4-3f20-4fac-bfcd-ba93cf109860","Type":"ContainerStarted","Data":"99cec09899bb89197dd1e07ff4665f9f30aa2e04da57140f7f36fed15ca35322"} Dec 04 17:32:01 crc kubenswrapper[4631]: I1204 17:32:01.253523 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"ed2489f4-3f20-4fac-bfcd-ba93cf109860","Type":"ContainerStarted","Data":"41e189519f02570286d32f8131da26d63a9d5d1c968aeac949d98ef353555bf6"} Dec 04 17:32:01 crc kubenswrapper[4631]: I1204 17:32:01.284440 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=23.284104309 podStartE2EDuration="23.284104309s" podCreationTimestamp="2025-12-04 17:31:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:32:01.263353124 +0000 UTC m=+251.295595122" watchObservedRunningTime="2025-12-04 17:32:01.284104309 +0000 UTC m=+251.316346337" Dec 04 17:32:01 crc kubenswrapper[4631]: I1204 17:32:01.296812 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=28.29679275 podStartE2EDuration="28.29679275s" podCreationTimestamp="2025-12-04 17:31:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:32:01.294386829 +0000 UTC m=+251.326628847" watchObservedRunningTime="2025-12-04 17:32:01.29679275 +0000 UTC m=+251.329034738" Dec 04 17:32:03 crc kubenswrapper[4631]: I1204 17:32:03.263191 4631 generic.go:334] "Generic (PLEG): container finished" podID="ed2489f4-3f20-4fac-bfcd-ba93cf109860" containerID="99cec09899bb89197dd1e07ff4665f9f30aa2e04da57140f7f36fed15ca35322" exitCode=0 Dec 04 17:32:03 crc kubenswrapper[4631]: I1204 17:32:03.263280 4631 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"ed2489f4-3f20-4fac-bfcd-ba93cf109860","Type":"ContainerDied","Data":"99cec09899bb89197dd1e07ff4665f9f30aa2e04da57140f7f36fed15ca35322"} Dec 04 17:32:04 crc kubenswrapper[4631]: I1204 17:32:04.524019 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 04 17:32:04 crc kubenswrapper[4631]: I1204 17:32:04.722594 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ed2489f4-3f20-4fac-bfcd-ba93cf109860-kube-api-access\") pod \"ed2489f4-3f20-4fac-bfcd-ba93cf109860\" (UID: \"ed2489f4-3f20-4fac-bfcd-ba93cf109860\") " Dec 04 17:32:04 crc kubenswrapper[4631]: I1204 17:32:04.722761 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ed2489f4-3f20-4fac-bfcd-ba93cf109860-kubelet-dir\") pod \"ed2489f4-3f20-4fac-bfcd-ba93cf109860\" (UID: \"ed2489f4-3f20-4fac-bfcd-ba93cf109860\") " Dec 04 17:32:04 crc kubenswrapper[4631]: I1204 17:32:04.723035 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ed2489f4-3f20-4fac-bfcd-ba93cf109860-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "ed2489f4-3f20-4fac-bfcd-ba93cf109860" (UID: "ed2489f4-3f20-4fac-bfcd-ba93cf109860"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:32:04 crc kubenswrapper[4631]: I1204 17:32:04.728388 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed2489f4-3f20-4fac-bfcd-ba93cf109860-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "ed2489f4-3f20-4fac-bfcd-ba93cf109860" (UID: "ed2489f4-3f20-4fac-bfcd-ba93cf109860"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:32:04 crc kubenswrapper[4631]: I1204 17:32:04.824125 4631 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ed2489f4-3f20-4fac-bfcd-ba93cf109860-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:04 crc kubenswrapper[4631]: I1204 17:32:04.824177 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ed2489f4-3f20-4fac-bfcd-ba93cf109860-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:05 crc kubenswrapper[4631]: I1204 17:32:05.277925 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"ed2489f4-3f20-4fac-bfcd-ba93cf109860","Type":"ContainerDied","Data":"41e189519f02570286d32f8131da26d63a9d5d1c968aeac949d98ef353555bf6"} Dec 04 17:32:05 crc kubenswrapper[4631]: I1204 17:32:05.277983 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="41e189519f02570286d32f8131da26d63a9d5d1c968aeac949d98ef353555bf6" Dec 04 17:32:05 crc kubenswrapper[4631]: I1204 17:32:05.278135 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 04 17:32:08 crc kubenswrapper[4631]: I1204 17:32:08.520054 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-ctqxg"] Dec 04 17:32:09 crc kubenswrapper[4631]: I1204 17:32:09.303333 4631 generic.go:334] "Generic (PLEG): container finished" podID="63aa12c5-8868-471f-929f-df3697cc07ae" containerID="76de65428aa58e4062d7f6767470dc8dc5825ff82d5c908b7091ec8c3fe8bced" exitCode=0 Dec 04 17:32:09 crc kubenswrapper[4631]: I1204 17:32:09.303590 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dr5fh" event={"ID":"63aa12c5-8868-471f-929f-df3697cc07ae","Type":"ContainerDied","Data":"76de65428aa58e4062d7f6767470dc8dc5825ff82d5c908b7091ec8c3fe8bced"} Dec 04 17:32:09 crc kubenswrapper[4631]: I1204 17:32:09.308992 4631 generic.go:334] "Generic (PLEG): container finished" podID="29451954-9708-42c3-87aa-e28d06a5c640" containerID="c710697965be352c27146a6fd20506de8ac49b6e1772fa907a84fba829d725c1" exitCode=0 Dec 04 17:32:09 crc kubenswrapper[4631]: I1204 17:32:09.309033 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nx6gn" event={"ID":"29451954-9708-42c3-87aa-e28d06a5c640","Type":"ContainerDied","Data":"c710697965be352c27146a6fd20506de8ac49b6e1772fa907a84fba829d725c1"} Dec 04 17:32:10 crc kubenswrapper[4631]: I1204 17:32:10.315703 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dr5fh" event={"ID":"63aa12c5-8868-471f-929f-df3697cc07ae","Type":"ContainerStarted","Data":"826c260ecb54ee69638fdcb1ab7f1ad64201ff3895a4fdbb9d6026e1d0d23154"} Dec 04 17:32:10 crc kubenswrapper[4631]: I1204 17:32:10.320388 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-54924" event={"ID":"2ff644ad-bdd2-4306-918d-fe5a0bfbc964","Type":"ContainerStarted","Data":"5ad646934ff2ff03f29cb8d3cd1717852d14e6fe9455ac79fffdd0e2dbb683bf"} Dec 04 17:32:10 crc kubenswrapper[4631]: I1204 17:32:10.325944 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nx6gn" event={"ID":"29451954-9708-42c3-87aa-e28d06a5c640","Type":"ContainerStarted","Data":"f5b443ba2e30f6566c7fa69c834a67662d42ac6e29b8726f82ce26c9e69d9b11"} Dec 04 17:32:10 crc kubenswrapper[4631]: I1204 17:32:10.343822 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dr5fh" podStartSLOduration=4.358957198 podStartE2EDuration="1m18.343804186s" podCreationTimestamp="2025-12-04 17:30:52 +0000 UTC" firstStartedPulling="2025-12-04 17:30:55.786058645 +0000 UTC m=+185.818300643" lastFinishedPulling="2025-12-04 17:32:09.770905633 +0000 UTC m=+259.803147631" observedRunningTime="2025-12-04 17:32:10.342648243 +0000 UTC m=+260.374890241" watchObservedRunningTime="2025-12-04 17:32:10.343804186 +0000 UTC m=+260.376046174" Dec 04 17:32:10 crc kubenswrapper[4631]: I1204 17:32:10.366357 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nx6gn" podStartSLOduration=5.457491645 podStartE2EDuration="1m19.366327724s" podCreationTimestamp="2025-12-04 17:30:51 +0000 UTC" firstStartedPulling="2025-12-04 17:30:55.785734326 +0000 UTC m=+185.817976324" lastFinishedPulling="2025-12-04 17:32:09.694570395 +0000 UTC m=+259.726812403" observedRunningTime="2025-12-04 17:32:10.363629505 +0000 UTC 
m=+260.395871513" watchObservedRunningTime="2025-12-04 17:32:10.366327724 +0000 UTC m=+260.398569722" Dec 04 17:32:12 crc kubenswrapper[4631]: I1204 17:32:12.305090 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nx6gn" Dec 04 17:32:12 crc kubenswrapper[4631]: I1204 17:32:12.305631 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nx6gn" Dec 04 17:32:12 crc kubenswrapper[4631]: I1204 17:32:12.343869 4631 generic.go:334] "Generic (PLEG): container finished" podID="2ff644ad-bdd2-4306-918d-fe5a0bfbc964" containerID="5ad646934ff2ff03f29cb8d3cd1717852d14e6fe9455ac79fffdd0e2dbb683bf" exitCode=0 Dec 04 17:32:12 crc kubenswrapper[4631]: I1204 17:32:12.343909 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-54924" event={"ID":"2ff644ad-bdd2-4306-918d-fe5a0bfbc964","Type":"ContainerDied","Data":"5ad646934ff2ff03f29cb8d3cd1717852d14e6fe9455ac79fffdd0e2dbb683bf"} Dec 04 17:32:12 crc kubenswrapper[4631]: I1204 17:32:12.403131 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nx6gn" Dec 04 17:32:12 crc kubenswrapper[4631]: I1204 17:32:12.531383 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dr5fh" Dec 04 17:32:12 crc kubenswrapper[4631]: I1204 17:32:12.531436 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dr5fh" Dec 04 17:32:12 crc kubenswrapper[4631]: I1204 17:32:12.574197 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dr5fh" Dec 04 17:32:22 crc kubenswrapper[4631]: I1204 17:32:22.356851 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nx6gn" Dec 04 17:32:22 crc kubenswrapper[4631]: E1204 17:32:22.363663 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 04 17:32:22 crc kubenswrapper[4631]: E1204 17:32:22.363837 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-74nx8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-l85pg_openshift-marketplace(5cb73c44-e995-4e73-9bd3-422c00633ddf): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 04 17:32:22 crc kubenswrapper[4631]: E1204 17:32:22.365106 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-l85pg" podUID="5cb73c44-e995-4e73-9bd3-422c00633ddf" Dec 04 17:32:22 crc kubenswrapper[4631]: I1204 17:32:22.594620 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dr5fh" Dec 04 17:32:23 crc kubenswrapper[4631]: I1204 17:32:23.404984 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-54924" event={"ID":"2ff644ad-bdd2-4306-918d-fe5a0bfbc964","Type":"ContainerStarted","Data":"a996ce528f4f8cde7bb3cdcaceb578f8c4f917bb4eaba7f5c8ac04585d0a2c47"} Dec 04 17:32:23 crc kubenswrapper[4631]: I1204 17:32:23.406886 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bxvz7" event={"ID":"7c6c775a-ae4c-4682-97fd-5f9e4457f8fd","Type":"ContainerStarted","Data":"bfc5b219c97a31400861a947a35027550f0d57badd985180391465bcf6b1f63d"} Dec 04 17:32:23 crc kubenswrapper[4631]: I1204 17:32:23.409453 4631 generic.go:334] "Generic (PLEG): container finished" podID="885a0356-3029-482f-b3b5-3caa01e19c62" containerID="452eb3f344fb58845f606c81534a76a66aad0d17be38f08fdd3252552542cf5d" exitCode=0 Dec 04 17:32:23 crc kubenswrapper[4631]: I1204 17:32:23.409498 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9bhz" event={"ID":"885a0356-3029-482f-b3b5-3caa01e19c62","Type":"ContainerDied","Data":"452eb3f344fb58845f606c81534a76a66aad0d17be38f08fdd3252552542cf5d"} Dec 04 17:32:23 crc kubenswrapper[4631]: I1204 17:32:23.411329 4631 generic.go:334] "Generic (PLEG): container finished" podID="5622ec39-e11e-44c2-b059-47d6fc091328" 
containerID="6a5758c2c2a7a4a14f6fb8ffbf5719723db9895f37a1e8b6c8cf3d3980573628" exitCode=0 Dec 04 17:32:23 crc kubenswrapper[4631]: I1204 17:32:23.411358 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bvzck" event={"ID":"5622ec39-e11e-44c2-b059-47d6fc091328","Type":"ContainerDied","Data":"6a5758c2c2a7a4a14f6fb8ffbf5719723db9895f37a1e8b6c8cf3d3980573628"} Dec 04 17:32:23 crc kubenswrapper[4631]: I1204 17:32:23.412922 4631 generic.go:334] "Generic (PLEG): container finished" podID="e633bf80-04ad-4770-abc5-5d453077543c" containerID="461fb2020d0e2fd89b333a84f8a71f48c69ec012a1d2e9fe2377cfd96e9043e0" exitCode=0 Dec 04 17:32:23 crc kubenswrapper[4631]: I1204 17:32:23.412949 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bmh96" event={"ID":"e633bf80-04ad-4770-abc5-5d453077543c","Type":"ContainerDied","Data":"461fb2020d0e2fd89b333a84f8a71f48c69ec012a1d2e9fe2377cfd96e9043e0"} Dec 04 17:32:23 crc kubenswrapper[4631]: I1204 17:32:23.431890 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-54924" podStartSLOduration=6.855432787 podStartE2EDuration="1m31.431862659s" podCreationTimestamp="2025-12-04 17:30:52 +0000 UTC" firstStartedPulling="2025-12-04 17:30:57.820358292 +0000 UTC m=+187.852600290" lastFinishedPulling="2025-12-04 17:32:22.396788104 +0000 UTC m=+272.429030162" observedRunningTime="2025-12-04 17:32:23.42720058 +0000 UTC m=+273.459442598" watchObservedRunningTime="2025-12-04 17:32:23.431862659 +0000 UTC m=+273.464104677" Dec 04 17:32:24 crc kubenswrapper[4631]: I1204 17:32:24.045266 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dr5fh"] Dec 04 17:32:24 crc kubenswrapper[4631]: I1204 17:32:24.045545 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dr5fh" podUID="63aa12c5-8868-471f-929f-df3697cc07ae" containerName="registry-server" containerID="cri-o://826c260ecb54ee69638fdcb1ab7f1ad64201ff3895a4fdbb9d6026e1d0d23154" gracePeriod=2 Dec 04 17:32:24 crc kubenswrapper[4631]: I1204 17:32:24.420156 4631 generic.go:334] "Generic (PLEG): container finished" podID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" containerID="bfc5b219c97a31400861a947a35027550f0d57badd985180391465bcf6b1f63d" exitCode=0 Dec 04 17:32:24 crc kubenswrapper[4631]: I1204 17:32:24.420207 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bxvz7" event={"ID":"7c6c775a-ae4c-4682-97fd-5f9e4457f8fd","Type":"ContainerDied","Data":"bfc5b219c97a31400861a947a35027550f0d57badd985180391465bcf6b1f63d"} Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.135280 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dr5fh" Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.328006 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63aa12c5-8868-471f-929f-df3697cc07ae-catalog-content\") pod \"63aa12c5-8868-471f-929f-df3697cc07ae\" (UID: \"63aa12c5-8868-471f-929f-df3697cc07ae\") " Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.328169 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjlwh\" (UniqueName: \"kubernetes.io/projected/63aa12c5-8868-471f-929f-df3697cc07ae-kube-api-access-fjlwh\") pod \"63aa12c5-8868-471f-929f-df3697cc07ae\" (UID: \"63aa12c5-8868-471f-929f-df3697cc07ae\") " Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.328262 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63aa12c5-8868-471f-929f-df3697cc07ae-utilities\") pod \"63aa12c5-8868-471f-929f-df3697cc07ae\" (UID: \"63aa12c5-8868-471f-929f-df3697cc07ae\") " Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.328924 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63aa12c5-8868-471f-929f-df3697cc07ae-utilities" (OuterVolumeSpecName: "utilities") pod "63aa12c5-8868-471f-929f-df3697cc07ae" (UID: "63aa12c5-8868-471f-929f-df3697cc07ae"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.346608 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63aa12c5-8868-471f-929f-df3697cc07ae-kube-api-access-fjlwh" (OuterVolumeSpecName: "kube-api-access-fjlwh") pod "63aa12c5-8868-471f-929f-df3697cc07ae" (UID: "63aa12c5-8868-471f-929f-df3697cc07ae"). InnerVolumeSpecName "kube-api-access-fjlwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.350823 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63aa12c5-8868-471f-929f-df3697cc07ae-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "63aa12c5-8868-471f-929f-df3697cc07ae" (UID: "63aa12c5-8868-471f-929f-df3697cc07ae"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.433902 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63aa12c5-8868-471f-929f-df3697cc07ae-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.435051 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63aa12c5-8868-471f-929f-df3697cc07ae-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.435067 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjlwh\" (UniqueName: \"kubernetes.io/projected/63aa12c5-8868-471f-929f-df3697cc07ae-kube-api-access-fjlwh\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.437014 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bxvz7" event={"ID":"7c6c775a-ae4c-4682-97fd-5f9e4457f8fd","Type":"ContainerStarted","Data":"c54d41a82213ea8f32b326c3427b5324d33ebf29a25561b5d5afcb04fc7be8d4"} Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.441272 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9bhz" event={"ID":"885a0356-3029-482f-b3b5-3caa01e19c62","Type":"ContainerStarted","Data":"d7159ffc18c8ec88c3878c3576dda83febe7a006e45acb65f2dd5ddbfa777e76"} Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.443408 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bvzck" event={"ID":"5622ec39-e11e-44c2-b059-47d6fc091328","Type":"ContainerStarted","Data":"c1598d88a9f986b17871c5f2d8f7d01b8011cc3189fb7d17b254117c64a7ed82"} Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.465302 4631 generic.go:334] "Generic (PLEG): container finished" podID="63aa12c5-8868-471f-929f-df3697cc07ae" containerID="826c260ecb54ee69638fdcb1ab7f1ad64201ff3895a4fdbb9d6026e1d0d23154" exitCode=0 Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.465406 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dr5fh" event={"ID":"63aa12c5-8868-471f-929f-df3697cc07ae","Type":"ContainerDied","Data":"826c260ecb54ee69638fdcb1ab7f1ad64201ff3895a4fdbb9d6026e1d0d23154"} Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.465440 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dr5fh" event={"ID":"63aa12c5-8868-471f-929f-df3697cc07ae","Type":"ContainerDied","Data":"cd539170e1bcba7b73a8c696984b5b61ed390088e90bec54b8bf572f2ad440ae"} Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.465462 4631 scope.go:117] "RemoveContainer" containerID="826c260ecb54ee69638fdcb1ab7f1ad64201ff3895a4fdbb9d6026e1d0d23154" Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.465932 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dr5fh" Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.471923 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bxvz7" podStartSLOduration=11.465862295 podStartE2EDuration="1m33.471909037s" podCreationTimestamp="2025-12-04 17:30:52 +0000 UTC" firstStartedPulling="2025-12-04 17:31:02.886478946 +0000 UTC m=+192.918720934" lastFinishedPulling="2025-12-04 17:32:24.892525668 +0000 UTC m=+274.924767676" observedRunningTime="2025-12-04 17:32:25.466652921 +0000 UTC m=+275.498894919" watchObservedRunningTime="2025-12-04 17:32:25.471909037 +0000 UTC m=+275.504151035" Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.485268 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bmh96" event={"ID":"e633bf80-04ad-4770-abc5-5d453077543c","Type":"ContainerStarted","Data":"67a93955a8caa57944cee1cfde94a8f78ea5ef57934a0946566c57d489527adf"} Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.490681 4631 scope.go:117] "RemoveContainer" containerID="76de65428aa58e4062d7f6767470dc8dc5825ff82d5c908b7091ec8c3fe8bced" Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.508102 4631 scope.go:117] "RemoveContainer" containerID="633189f10e04b150dc60946f6981efcd624c47566b3589d6ec327dc85c3c64c2" Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.538970 4631 scope.go:117] "RemoveContainer" containerID="826c260ecb54ee69638fdcb1ab7f1ad64201ff3895a4fdbb9d6026e1d0d23154" Dec 04 17:32:25 crc kubenswrapper[4631]: E1204 17:32:25.541282 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"826c260ecb54ee69638fdcb1ab7f1ad64201ff3895a4fdbb9d6026e1d0d23154\": container with ID starting with 826c260ecb54ee69638fdcb1ab7f1ad64201ff3895a4fdbb9d6026e1d0d23154 not found: ID does not exist" containerID="826c260ecb54ee69638fdcb1ab7f1ad64201ff3895a4fdbb9d6026e1d0d23154" Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.541414 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"826c260ecb54ee69638fdcb1ab7f1ad64201ff3895a4fdbb9d6026e1d0d23154"} err="failed to get container status \"826c260ecb54ee69638fdcb1ab7f1ad64201ff3895a4fdbb9d6026e1d0d23154\": rpc error: code = NotFound desc = could not find container \"826c260ecb54ee69638fdcb1ab7f1ad64201ff3895a4fdbb9d6026e1d0d23154\": container with ID starting with 826c260ecb54ee69638fdcb1ab7f1ad64201ff3895a4fdbb9d6026e1d0d23154 not found: ID does not exist" Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.541515 4631 scope.go:117] "RemoveContainer" containerID="76de65428aa58e4062d7f6767470dc8dc5825ff82d5c908b7091ec8c3fe8bced" Dec 04 17:32:25 crc kubenswrapper[4631]: E1204 17:32:25.541927 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76de65428aa58e4062d7f6767470dc8dc5825ff82d5c908b7091ec8c3fe8bced\": container with ID starting with 76de65428aa58e4062d7f6767470dc8dc5825ff82d5c908b7091ec8c3fe8bced not found: ID does not exist" containerID="76de65428aa58e4062d7f6767470dc8dc5825ff82d5c908b7091ec8c3fe8bced" Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.541978 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76de65428aa58e4062d7f6767470dc8dc5825ff82d5c908b7091ec8c3fe8bced"} err="failed to get container status 
\"76de65428aa58e4062d7f6767470dc8dc5825ff82d5c908b7091ec8c3fe8bced\": rpc error: code = NotFound desc = could not find container \"76de65428aa58e4062d7f6767470dc8dc5825ff82d5c908b7091ec8c3fe8bced\": container with ID starting with 76de65428aa58e4062d7f6767470dc8dc5825ff82d5c908b7091ec8c3fe8bced not found: ID does not exist" Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.542017 4631 scope.go:117] "RemoveContainer" containerID="633189f10e04b150dc60946f6981efcd624c47566b3589d6ec327dc85c3c64c2" Dec 04 17:32:25 crc kubenswrapper[4631]: E1204 17:32:25.543839 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"633189f10e04b150dc60946f6981efcd624c47566b3589d6ec327dc85c3c64c2\": container with ID starting with 633189f10e04b150dc60946f6981efcd624c47566b3589d6ec327dc85c3c64c2 not found: ID does not exist" containerID="633189f10e04b150dc60946f6981efcd624c47566b3589d6ec327dc85c3c64c2" Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.543866 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"633189f10e04b150dc60946f6981efcd624c47566b3589d6ec327dc85c3c64c2"} err="failed to get container status \"633189f10e04b150dc60946f6981efcd624c47566b3589d6ec327dc85c3c64c2\": rpc error: code = NotFound desc = could not find container \"633189f10e04b150dc60946f6981efcd624c47566b3589d6ec327dc85c3c64c2\": container with ID starting with 633189f10e04b150dc60946f6981efcd624c47566b3589d6ec327dc85c3c64c2 not found: ID does not exist" Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.549556 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-n9bhz" podStartSLOduration=5.250349891 podStartE2EDuration="1m36.549544462s" podCreationTimestamp="2025-12-04 17:30:49 +0000 UTC" firstStartedPulling="2025-12-04 17:30:53.735284241 +0000 UTC m=+183.767526239" lastFinishedPulling="2025-12-04 17:32:25.034478812 +0000 UTC m=+275.066720810" observedRunningTime="2025-12-04 17:32:25.522321553 +0000 UTC m=+275.554563551" watchObservedRunningTime="2025-12-04 17:32:25.549544462 +0000 UTC m=+275.581786460" Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.571286 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bvzck" podStartSLOduration=5.512294621 podStartE2EDuration="1m35.571265696s" podCreationTimestamp="2025-12-04 17:30:50 +0000 UTC" firstStartedPulling="2025-12-04 17:30:54.764534894 +0000 UTC m=+184.796776892" lastFinishedPulling="2025-12-04 17:32:24.823505949 +0000 UTC m=+274.855747967" observedRunningTime="2025-12-04 17:32:25.551282823 +0000 UTC m=+275.583524821" watchObservedRunningTime="2025-12-04 17:32:25.571265696 +0000 UTC m=+275.603507694" Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.571499 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dr5fh"] Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.576217 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dr5fh"] Dec 04 17:32:25 crc kubenswrapper[4631]: I1204 17:32:25.604778 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bmh96" podStartSLOduration=7.653671191 podStartE2EDuration="1m36.604761271s" podCreationTimestamp="2025-12-04 17:30:49 +0000 UTC" firstStartedPulling="2025-12-04 17:30:55.785598902 +0000 UTC 
m=+185.817840900" lastFinishedPulling="2025-12-04 17:32:24.736688982 +0000 UTC m=+274.768930980" observedRunningTime="2025-12-04 17:32:25.60101769 +0000 UTC m=+275.633259688" watchObservedRunningTime="2025-12-04 17:32:25.604761271 +0000 UTC m=+275.637003269" Dec 04 17:32:26 crc kubenswrapper[4631]: I1204 17:32:26.247662 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63aa12c5-8868-471f-929f-df3697cc07ae" path="/var/lib/kubelet/pods/63aa12c5-8868-471f-929f-df3697cc07ae/volumes" Dec 04 17:32:26 crc kubenswrapper[4631]: I1204 17:32:26.249099 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:32:26 crc kubenswrapper[4631]: I1204 17:32:26.249154 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:32:26 crc kubenswrapper[4631]: I1204 17:32:26.249188 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:32:26 crc kubenswrapper[4631]: I1204 17:32:26.249221 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:32:26 crc kubenswrapper[4631]: I1204 17:32:26.251443 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 04 17:32:26 crc kubenswrapper[4631]: I1204 17:32:26.251680 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 04 17:32:26 crc kubenswrapper[4631]: I1204 17:32:26.252018 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 04 17:32:26 crc kubenswrapper[4631]: I1204 17:32:26.261159 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:32:26 crc kubenswrapper[4631]: I1204 17:32:26.262291 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 04 17:32:26 crc kubenswrapper[4631]: I1204 17:32:26.267255 4631 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:32:26 crc kubenswrapper[4631]: I1204 17:32:26.276148 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:32:26 crc kubenswrapper[4631]: I1204 17:32:26.277245 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:32:26 crc kubenswrapper[4631]: I1204 17:32:26.457898 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:32:26 crc kubenswrapper[4631]: I1204 17:32:26.468301 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 04 17:32:26 crc kubenswrapper[4631]: I1204 17:32:26.478418 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 04 17:32:26 crc kubenswrapper[4631]: W1204 17:32:26.792591 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-3102582fd8df44965562f219827bc6a5be023ac08230a586c968ccbdf43c9513 WatchSource:0}: Error finding container 3102582fd8df44965562f219827bc6a5be023ac08230a586c968ccbdf43c9513: Status 404 returned error can't find the container with id 3102582fd8df44965562f219827bc6a5be023ac08230a586c968ccbdf43c9513 Dec 04 17:32:27 crc kubenswrapper[4631]: W1204 17:32:27.113064 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-31a63f66a368dae6d99cdf41265fb68f7513878a81032e1ac7e7395d00450d4b WatchSource:0}: Error finding container 31a63f66a368dae6d99cdf41265fb68f7513878a81032e1ac7e7395d00450d4b: Status 404 returned error can't find the container with id 31a63f66a368dae6d99cdf41265fb68f7513878a81032e1ac7e7395d00450d4b Dec 04 17:32:27 crc kubenswrapper[4631]: I1204 17:32:27.519280 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"22aea21ddd0d1459886ef658d1e7c99c81daa72a27904b501ca16fbf971f8ae3"} Dec 04 17:32:27 crc kubenswrapper[4631]: I1204 17:32:27.520723 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" 
event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"31a63f66a368dae6d99cdf41265fb68f7513878a81032e1ac7e7395d00450d4b"} Dec 04 17:32:27 crc kubenswrapper[4631]: I1204 17:32:27.522253 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"f65bc902ef245d13310f1b2ea5dcd398f7bcd9cdff3f6985700272edee9f0b58"} Dec 04 17:32:27 crc kubenswrapper[4631]: I1204 17:32:27.522280 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"3102582fd8df44965562f219827bc6a5be023ac08230a586c968ccbdf43c9513"} Dec 04 17:32:27 crc kubenswrapper[4631]: I1204 17:32:27.522570 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:32:29 crc kubenswrapper[4631]: I1204 17:32:29.533810 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"95244187152d20b4b68bcf0e4e7ef2137c13818db38a6eb1cc164e6a950ce697"} Dec 04 17:32:29 crc kubenswrapper[4631]: I1204 17:32:29.535598 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"85c49ada2a2c07251ffcc53944b7a59e67eee50851b2c4ed77b8796862b835bd"} Dec 04 17:32:29 crc kubenswrapper[4631]: I1204 17:32:29.973781 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-n9bhz" Dec 04 17:32:29 crc kubenswrapper[4631]: I1204 17:32:29.973857 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-n9bhz" Dec 04 17:32:34 crc kubenswrapper[4631]: I1204 17:32:30.020632 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-n9bhz" Dec 04 17:32:34 crc kubenswrapper[4631]: I1204 17:32:30.078586 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bmh96" Dec 04 17:32:34 crc kubenswrapper[4631]: I1204 17:32:30.078665 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bmh96" Dec 04 17:32:34 crc kubenswrapper[4631]: I1204 17:32:30.114190 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bmh96" Dec 04 17:32:34 crc kubenswrapper[4631]: E1204 17:32:31.684475 4631 kubelet.go:2526] "Housekeeping took longer than expected" err="housekeeping took too long" expected="1s" actual="1.446s" Dec 04 17:32:34 crc kubenswrapper[4631]: I1204 17:32:31.684910 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bvzck" Dec 04 17:32:34 crc kubenswrapper[4631]: I1204 17:32:31.684930 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bvzck" Dec 04 17:32:34 crc kubenswrapper[4631]: I1204 17:32:31.684974 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/certified-operators-bmh96" Dec 04 17:32:34 crc kubenswrapper[4631]: I1204 17:32:31.684998 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-n9bhz" Dec 04 17:32:34 crc kubenswrapper[4631]: I1204 17:32:31.685065 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bvzck" Dec 04 17:32:34 crc kubenswrapper[4631]: I1204 17:32:31.733413 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bvzck" Dec 04 17:32:34 crc kubenswrapper[4631]: I1204 17:32:32.448423 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bvzck"] Dec 04 17:32:34 crc kubenswrapper[4631]: I1204 17:32:32.955263 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-54924" Dec 04 17:32:34 crc kubenswrapper[4631]: I1204 17:32:32.955330 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-54924" Dec 04 17:32:34 crc kubenswrapper[4631]: I1204 17:32:32.994144 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-54924" Dec 04 17:32:34 crc kubenswrapper[4631]: I1204 17:32:33.173358 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bxvz7" Dec 04 17:32:34 crc kubenswrapper[4631]: I1204 17:32:33.173412 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bxvz7" Dec 04 17:32:34 crc kubenswrapper[4631]: I1204 17:32:33.210141 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bxvz7" Dec 04 17:32:34 crc kubenswrapper[4631]: I1204 17:32:33.563699 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" podUID="e4ee6a0c-a43b-4cef-b54d-498f84fc947e" containerName="oauth-openshift" containerID="cri-o://c01491010c9c6088ef01de3ea367059c5c79d7f0d4a0e3fe49ffaebd0a7b1b9b" gracePeriod=15 Dec 04 17:32:34 crc kubenswrapper[4631]: I1204 17:32:33.563924 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bvzck" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" containerName="registry-server" containerID="cri-o://c1598d88a9f986b17871c5f2d8f7d01b8011cc3189fb7d17b254117c64a7ed82" gracePeriod=2 Dec 04 17:32:34 crc kubenswrapper[4631]: I1204 17:32:33.622719 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-54924" Dec 04 17:32:34 crc kubenswrapper[4631]: I1204 17:32:33.625062 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bxvz7" Dec 04 17:32:35 crc kubenswrapper[4631]: I1204 17:32:35.575941 4631 generic.go:334] "Generic (PLEG): container finished" podID="e4ee6a0c-a43b-4cef-b54d-498f84fc947e" containerID="c01491010c9c6088ef01de3ea367059c5c79d7f0d4a0e3fe49ffaebd0a7b1b9b" exitCode=0 Dec 04 17:32:35 crc kubenswrapper[4631]: I1204 17:32:35.575989 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" 
event={"ID":"e4ee6a0c-a43b-4cef-b54d-498f84fc947e","Type":"ContainerDied","Data":"c01491010c9c6088ef01de3ea367059c5c79d7f0d4a0e3fe49ffaebd0a7b1b9b"} Dec 04 17:32:35 crc kubenswrapper[4631]: I1204 17:32:35.579141 4631 generic.go:334] "Generic (PLEG): container finished" podID="5622ec39-e11e-44c2-b059-47d6fc091328" containerID="c1598d88a9f986b17871c5f2d8f7d01b8011cc3189fb7d17b254117c64a7ed82" exitCode=0 Dec 04 17:32:35 crc kubenswrapper[4631]: I1204 17:32:35.579176 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bvzck" event={"ID":"5622ec39-e11e-44c2-b059-47d6fc091328","Type":"ContainerDied","Data":"c1598d88a9f986b17871c5f2d8f7d01b8011cc3189fb7d17b254117c64a7ed82"} Dec 04 17:32:36 crc kubenswrapper[4631]: E1204 17:32:36.240992 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-l85pg" podUID="5cb73c44-e995-4e73-9bd3-422c00633ddf" Dec 04 17:32:36 crc kubenswrapper[4631]: I1204 17:32:36.960189 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" Dec 04 17:32:36 crc kubenswrapper[4631]: I1204 17:32:36.987775 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-cc7989dc6-mvfhk"] Dec 04 17:32:36 crc kubenswrapper[4631]: E1204 17:32:36.988027 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63aa12c5-8868-471f-929f-df3697cc07ae" containerName="extract-content" Dec 04 17:32:36 crc kubenswrapper[4631]: I1204 17:32:36.988038 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="63aa12c5-8868-471f-929f-df3697cc07ae" containerName="extract-content" Dec 04 17:32:36 crc kubenswrapper[4631]: E1204 17:32:36.988054 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63aa12c5-8868-471f-929f-df3697cc07ae" containerName="registry-server" Dec 04 17:32:36 crc kubenswrapper[4631]: I1204 17:32:36.988060 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="63aa12c5-8868-471f-929f-df3697cc07ae" containerName="registry-server" Dec 04 17:32:36 crc kubenswrapper[4631]: E1204 17:32:36.988068 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63aa12c5-8868-471f-929f-df3697cc07ae" containerName="extract-utilities" Dec 04 17:32:36 crc kubenswrapper[4631]: I1204 17:32:36.988075 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="63aa12c5-8868-471f-929f-df3697cc07ae" containerName="extract-utilities" Dec 04 17:32:36 crc kubenswrapper[4631]: E1204 17:32:36.988083 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4ee6a0c-a43b-4cef-b54d-498f84fc947e" containerName="oauth-openshift" Dec 04 17:32:36 crc kubenswrapper[4631]: I1204 17:32:36.988089 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4ee6a0c-a43b-4cef-b54d-498f84fc947e" containerName="oauth-openshift" Dec 04 17:32:36 crc kubenswrapper[4631]: E1204 17:32:36.988096 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed2489f4-3f20-4fac-bfcd-ba93cf109860" containerName="pruner" Dec 04 17:32:36 crc kubenswrapper[4631]: I1204 17:32:36.988103 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed2489f4-3f20-4fac-bfcd-ba93cf109860" containerName="pruner" Dec 04 17:32:36 crc kubenswrapper[4631]: I1204 17:32:36.988195 4631 
memory_manager.go:354] "RemoveStaleState removing state" podUID="63aa12c5-8868-471f-929f-df3697cc07ae" containerName="registry-server" Dec 04 17:32:36 crc kubenswrapper[4631]: I1204 17:32:36.988205 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4ee6a0c-a43b-4cef-b54d-498f84fc947e" containerName="oauth-openshift" Dec 04 17:32:36 crc kubenswrapper[4631]: I1204 17:32:36.988214 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed2489f4-3f20-4fac-bfcd-ba93cf109860" containerName="pruner" Dec 04 17:32:36 crc kubenswrapper[4631]: I1204 17:32:36.988643 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.006443 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-cc7989dc6-mvfhk"] Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.041724 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-template-error\") pod \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.041788 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-template-provider-selection\") pod \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.041816 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-audit-dir\") pod \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.041834 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-ocp-branding-template\") pod \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.041852 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-audit-policies\") pod \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.041876 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-template-login\") pod \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.041894 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-serving-cert\") pod \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\" (UID: 
\"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.041926 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-session\") pod \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.041954 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-idp-0-file-data\") pod \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.041984 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-service-ca\") pod \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.042007 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-router-certs\") pod \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.042024 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-trusted-ca-bundle\") pod \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.042049 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-klrg9\" (UniqueName: \"kubernetes.io/projected/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-kube-api-access-klrg9\") pod \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.042079 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-cliconfig\") pod \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\" (UID: \"e4ee6a0c-a43b-4cef-b54d-498f84fc947e\") " Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.042097 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "e4ee6a0c-a43b-4cef-b54d-498f84fc947e" (UID: "e4ee6a0c-a43b-4cef-b54d-498f84fc947e"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.042218 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8j9w\" (UniqueName: \"kubernetes.io/projected/0e227500-d232-4c7b-b78e-24f66d88f82a-kube-api-access-h8j9w\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.042238 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.042277 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-system-session\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.042294 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.042319 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.042344 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.042360 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-user-template-error\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.042390 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.042408 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-system-service-ca\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.042423 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-system-router-certs\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.042453 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0e227500-d232-4c7b-b78e-24f66d88f82a-audit-policies\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.042478 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.042501 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0e227500-d232-4c7b-b78e-24f66d88f82a-audit-dir\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.042531 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-user-template-login\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.042568 4631 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.043208 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "e4ee6a0c-a43b-4cef-b54d-498f84fc947e" (UID: 
"e4ee6a0c-a43b-4cef-b54d-498f84fc947e"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.043321 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "e4ee6a0c-a43b-4cef-b54d-498f84fc947e" (UID: "e4ee6a0c-a43b-4cef-b54d-498f84fc947e"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.049163 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "e4ee6a0c-a43b-4cef-b54d-498f84fc947e" (UID: "e4ee6a0c-a43b-4cef-b54d-498f84fc947e"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.049443 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "e4ee6a0c-a43b-4cef-b54d-498f84fc947e" (UID: "e4ee6a0c-a43b-4cef-b54d-498f84fc947e"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.049948 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "e4ee6a0c-a43b-4cef-b54d-498f84fc947e" (UID: "e4ee6a0c-a43b-4cef-b54d-498f84fc947e"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.050493 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "e4ee6a0c-a43b-4cef-b54d-498f84fc947e" (UID: "e4ee6a0c-a43b-4cef-b54d-498f84fc947e"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.051073 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-kube-api-access-klrg9" (OuterVolumeSpecName: "kube-api-access-klrg9") pod "e4ee6a0c-a43b-4cef-b54d-498f84fc947e" (UID: "e4ee6a0c-a43b-4cef-b54d-498f84fc947e"). InnerVolumeSpecName "kube-api-access-klrg9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.057761 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bxvz7"] Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.058010 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bxvz7" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" containerName="registry-server" containerID="cri-o://c54d41a82213ea8f32b326c3427b5324d33ebf29a25561b5d5afcb04fc7be8d4" gracePeriod=2 Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.059713 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "e4ee6a0c-a43b-4cef-b54d-498f84fc947e" (UID: "e4ee6a0c-a43b-4cef-b54d-498f84fc947e"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.065656 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "e4ee6a0c-a43b-4cef-b54d-498f84fc947e" (UID: "e4ee6a0c-a43b-4cef-b54d-498f84fc947e"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.070762 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "e4ee6a0c-a43b-4cef-b54d-498f84fc947e" (UID: "e4ee6a0c-a43b-4cef-b54d-498f84fc947e"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.071137 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "e4ee6a0c-a43b-4cef-b54d-498f84fc947e" (UID: "e4ee6a0c-a43b-4cef-b54d-498f84fc947e"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.071709 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "e4ee6a0c-a43b-4cef-b54d-498f84fc947e" (UID: "e4ee6a0c-a43b-4cef-b54d-498f84fc947e"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.074513 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "e4ee6a0c-a43b-4cef-b54d-498f84fc947e" (UID: "e4ee6a0c-a43b-4cef-b54d-498f84fc947e"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.143851 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0e227500-d232-4c7b-b78e-24f66d88f82a-audit-policies\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.143934 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.143987 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0e227500-d232-4c7b-b78e-24f66d88f82a-audit-dir\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144021 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-user-template-login\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144083 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8j9w\" (UniqueName: \"kubernetes.io/projected/0e227500-d232-4c7b-b78e-24f66d88f82a-kube-api-access-h8j9w\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144118 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144146 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-system-session\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144164 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: 
I1204 17:32:37.144161 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0e227500-d232-4c7b-b78e-24f66d88f82a-audit-dir\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144212 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144245 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144279 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-user-template-error\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144298 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144313 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-system-service-ca\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144331 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-system-router-certs\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144672 4631 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144778 4631 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144792 4631 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144802 4631 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144811 4631 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144823 4631 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144833 4631 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144860 4631 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144871 4631 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144880 4631 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144888 4631 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144900 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-klrg9\" (UniqueName: \"kubernetes.io/projected/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-kube-api-access-klrg9\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.144910 4631 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e4ee6a0c-a43b-4cef-b54d-498f84fc947e-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.145148 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0e227500-d232-4c7b-b78e-24f66d88f82a-audit-policies\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.146309 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.146710 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-system-service-ca\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.148127 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.149452 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.149747 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-system-router-certs\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.150331 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-user-template-login\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.151927 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-system-session\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.152048 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.152754 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-user-template-error\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.153092 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.153087 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0e227500-d232-4c7b-b78e-24f66d88f82a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.165144 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8j9w\" (UniqueName: \"kubernetes.io/projected/0e227500-d232-4c7b-b78e-24f66d88f82a-kube-api-access-h8j9w\") pod \"oauth-openshift-cc7989dc6-mvfhk\" (UID: \"0e227500-d232-4c7b-b78e-24f66d88f82a\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.311187 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.596683 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" event={"ID":"e4ee6a0c-a43b-4cef-b54d-498f84fc947e","Type":"ContainerDied","Data":"8e121c9be77c1c37c528dc16d7d10117f402c63b04607552efdde148f87f0e97"} Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.597285 4631 scope.go:117] "RemoveContainer" containerID="c01491010c9c6088ef01de3ea367059c5c79d7f0d4a0e3fe49ffaebd0a7b1b9b" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.596891 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-ctqxg" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.624290 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-ctqxg"] Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.635946 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-ctqxg"] Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.646213 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bvzck" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.734267 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-cc7989dc6-mvfhk"] Dec 04 17:32:37 crc kubenswrapper[4631]: W1204 17:32:37.739341 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0e227500_d232_4c7b_b78e_24f66d88f82a.slice/crio-a84eee26df1e92cd22689d2da79e25b6d390b6bf2e9c035ed5fe56352d67bb2b WatchSource:0}: Error finding container a84eee26df1e92cd22689d2da79e25b6d390b6bf2e9c035ed5fe56352d67bb2b: Status 404 returned error can't find the container with id a84eee26df1e92cd22689d2da79e25b6d390b6bf2e9c035ed5fe56352d67bb2b Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.754540 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5622ec39-e11e-44c2-b059-47d6fc091328-utilities\") pod \"5622ec39-e11e-44c2-b059-47d6fc091328\" (UID: \"5622ec39-e11e-44c2-b059-47d6fc091328\") " Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.754597 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hcr6b\" (UniqueName: \"kubernetes.io/projected/5622ec39-e11e-44c2-b059-47d6fc091328-kube-api-access-hcr6b\") pod \"5622ec39-e11e-44c2-b059-47d6fc091328\" (UID: \"5622ec39-e11e-44c2-b059-47d6fc091328\") " Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.754775 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5622ec39-e11e-44c2-b059-47d6fc091328-catalog-content\") pod \"5622ec39-e11e-44c2-b059-47d6fc091328\" (UID: \"5622ec39-e11e-44c2-b059-47d6fc091328\") " Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.755594 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5622ec39-e11e-44c2-b059-47d6fc091328-utilities" (OuterVolumeSpecName: "utilities") pod "5622ec39-e11e-44c2-b059-47d6fc091328" (UID: "5622ec39-e11e-44c2-b059-47d6fc091328"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.759120 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5622ec39-e11e-44c2-b059-47d6fc091328-kube-api-access-hcr6b" (OuterVolumeSpecName: "kube-api-access-hcr6b") pod "5622ec39-e11e-44c2-b059-47d6fc091328" (UID: "5622ec39-e11e-44c2-b059-47d6fc091328"). InnerVolumeSpecName "kube-api-access-hcr6b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.806954 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5622ec39-e11e-44c2-b059-47d6fc091328-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5622ec39-e11e-44c2-b059-47d6fc091328" (UID: "5622ec39-e11e-44c2-b059-47d6fc091328"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.855883 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5622ec39-e11e-44c2-b059-47d6fc091328-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.855915 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5622ec39-e11e-44c2-b059-47d6fc091328-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:37 crc kubenswrapper[4631]: I1204 17:32:37.855925 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hcr6b\" (UniqueName: \"kubernetes.io/projected/5622ec39-e11e-44c2-b059-47d6fc091328-kube-api-access-hcr6b\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.246606 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4ee6a0c-a43b-4cef-b54d-498f84fc947e" path="/var/lib/kubelet/pods/e4ee6a0c-a43b-4cef-b54d-498f84fc947e/volumes" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.515821 4631 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 04 17:32:38 crc kubenswrapper[4631]: E1204 17:32:38.516114 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" containerName="registry-server" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.516130 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" containerName="registry-server" Dec 04 17:32:38 crc kubenswrapper[4631]: E1204 17:32:38.516150 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" containerName="extract-content" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.516159 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" containerName="extract-content" Dec 04 17:32:38 crc kubenswrapper[4631]: E1204 17:32:38.516180 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" containerName="extract-utilities" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.516188 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" containerName="extract-utilities" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.516308 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" containerName="registry-server" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.516808 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.517365 4631 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.517613 4631 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.517672 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284" gracePeriod=15 Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.517831 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad" gracePeriod=15 Dec 04 17:32:38 crc kubenswrapper[4631]: E1204 17:32:38.517844 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.517894 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 04 17:32:38 crc kubenswrapper[4631]: E1204 17:32:38.517919 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.517929 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 04 17:32:38 crc kubenswrapper[4631]: E1204 17:32:38.517942 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.517951 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 04 17:32:38 crc kubenswrapper[4631]: E1204 17:32:38.517964 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.517974 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 04 17:32:38 crc kubenswrapper[4631]: E1204 17:32:38.517990 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.517998 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 04 17:32:38 crc kubenswrapper[4631]: E1204 17:32:38.518007 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.518015 4631 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.518141 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.518154 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.518172 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.518184 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.518194 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.518623 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e" gracePeriod=15 Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.518704 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a" gracePeriod=15 Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.518721 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f" gracePeriod=15 Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.563534 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.563619 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.563658 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 17:32:38 crc 
kubenswrapper[4631]: I1204 17:32:38.563689 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.563709 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.563734 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.563754 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.563790 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.605756 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" event={"ID":"0e227500-d232-4c7b-b78e-24f66d88f82a","Type":"ContainerStarted","Data":"96bbb66d0af03636e5c22c9cdce8fc0906f21172253adaca855716006c5d273a"} Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.605816 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" event={"ID":"0e227500-d232-4c7b-b78e-24f66d88f82a","Type":"ContainerStarted","Data":"a84eee26df1e92cd22689d2da79e25b6d390b6bf2e9c035ed5fe56352d67bb2b"} Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.606294 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.611070 4631 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.611386 4631 status_manager.go:851] "Failed to get status for pod" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-cc7989dc6-mvfhk\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.612676 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bvzck" event={"ID":"5622ec39-e11e-44c2-b059-47d6fc091328","Type":"ContainerDied","Data":"f50665149f641a9fb28136a2c1e3e33f9d645a6766c2b929e8bd1ff3c9ac49e7"} Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.612737 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bvzck" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.612756 4631 scope.go:117] "RemoveContainer" containerID="c1598d88a9f986b17871c5f2d8f7d01b8011cc3189fb7d17b254117c64a7ed82" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.615122 4631 status_manager.go:851] "Failed to get status for pod" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-cc7989dc6-mvfhk\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.615656 4631 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.616319 4631 status_manager.go:851] "Failed to get status for pod" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" pod="openshift-marketplace/community-operators-bvzck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bvzck\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.617047 4631 generic.go:334] "Generic (PLEG): container finished" podID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" containerID="c54d41a82213ea8f32b326c3427b5324d33ebf29a25561b5d5afcb04fc7be8d4" exitCode=0 Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.617112 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bxvz7" event={"ID":"7c6c775a-ae4c-4682-97fd-5f9e4457f8fd","Type":"ContainerDied","Data":"c54d41a82213ea8f32b326c3427b5324d33ebf29a25561b5d5afcb04fc7be8d4"} Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.617578 4631 status_manager.go:851] "Failed to get status for pod" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" pod="openshift-marketplace/community-operators-bvzck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bvzck\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.617797 4631 status_manager.go:851] "Failed to get status for pod" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-cc7989dc6-mvfhk\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.618002 4631 status_manager.go:851] 
"Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.630339 4631 scope.go:117] "RemoveContainer" containerID="6a5758c2c2a7a4a14f6fb8ffbf5719723db9895f37a1e8b6c8cf3d3980573628" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.667797 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.668265 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.668401 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.668509 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.668655 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.668743 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.668878 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.668985 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.669913 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.670047 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.670324 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.670407 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.670443 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.670855 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.670889 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.670925 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.709837 4631 scope.go:117] "RemoveContainer" containerID="f5f0f80630b8839681738701a9a1f37754e90835dcbf0d2c2f6bf46f13c1d8f4" Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.721738 4631 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 
192.168.126.11:6443: connect: connection refused" start-of-body= Dec 04 17:32:38 crc kubenswrapper[4631]: I1204 17:32:38.721802 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused" Dec 04 17:32:38 crc kubenswrapper[4631]: E1204 17:32:38.815077 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:32:38Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:32:38Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:32:38Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:32:38Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:76fa3a070e2d266753455b1f5cdc0275f55a5492ac0eb2a953655259ed14e122\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:d7d1951816ca8fc0605d2e29f392d6b1ef65d09ea9454f3c3f9972568e0eb00b\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1610200900},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:18451dc6c69687adf6c276ed6a28aa8c7ab6e963bf16e3be5074938ed0a05bf0\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:3451c27f73d0c1e1ee92e475fc2228a4ddacfef2189ed3613dcffd2fdedc88e3\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1207237969},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:9932555c9cde7a4fafbb069ced4e1874d584d0161ef1681710ba649966fa3aed\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:eb617b21d6eac1158f0772431048128991ac63dea611ddc67d9594df748921e7\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1201438029},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:e8990432556acad31519b1a73ec32f32d27c2034cf9e5cc4db8980efc7331594\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:ebe9f523f5c211a3a0f2570331dddcd5be15b12c1fecd9b8b121f881bfaad029\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":112902790
3},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2775
3fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9ce
f05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:38 crc kubenswrapper[4631]: E1204 17:32:38.815727 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:38 crc kubenswrapper[4631]: E1204 17:32:38.815954 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:38 crc kubenswrapper[4631]: E1204 17:32:38.816164 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:38 crc kubenswrapper[4631]: E1204 17:32:38.816390 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:38 crc kubenswrapper[4631]: E1204 17:32:38.816472 4631 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 17:32:39 crc kubenswrapper[4631]: E1204 17:32:39.265339 4631 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.194:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" volumeName="registry-storage" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.280716 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bxvz7" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.281600 4631 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.281834 4631 status_manager.go:851] "Failed to get status for pod" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" pod="openshift-marketplace/community-operators-bvzck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bvzck\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.282128 4631 status_manager.go:851] "Failed to get status for pod" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-cc7989dc6-mvfhk\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.282334 4631 status_manager.go:851] "Failed to get status for pod" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" pod="openshift-marketplace/redhat-operators-bxvz7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-bxvz7\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.379943 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8kcm9\" (UniqueName: \"kubernetes.io/projected/7c6c775a-ae4c-4682-97fd-5f9e4457f8fd-kube-api-access-8kcm9\") pod \"7c6c775a-ae4c-4682-97fd-5f9e4457f8fd\" (UID: \"7c6c775a-ae4c-4682-97fd-5f9e4457f8fd\") " Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.380365 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c6c775a-ae4c-4682-97fd-5f9e4457f8fd-catalog-content\") pod \"7c6c775a-ae4c-4682-97fd-5f9e4457f8fd\" (UID: \"7c6c775a-ae4c-4682-97fd-5f9e4457f8fd\") " Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.380452 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c6c775a-ae4c-4682-97fd-5f9e4457f8fd-utilities\") pod \"7c6c775a-ae4c-4682-97fd-5f9e4457f8fd\" (UID: \"7c6c775a-ae4c-4682-97fd-5f9e4457f8fd\") " Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.381875 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c6c775a-ae4c-4682-97fd-5f9e4457f8fd-utilities" (OuterVolumeSpecName: "utilities") pod "7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" (UID: "7c6c775a-ae4c-4682-97fd-5f9e4457f8fd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.392617 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c6c775a-ae4c-4682-97fd-5f9e4457f8fd-kube-api-access-8kcm9" (OuterVolumeSpecName: "kube-api-access-8kcm9") pod "7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" (UID: "7c6c775a-ae4c-4682-97fd-5f9e4457f8fd"). 
InnerVolumeSpecName "kube-api-access-8kcm9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.465749 4631 patch_prober.go:28] interesting pod/oauth-openshift-cc7989dc6-mvfhk container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.56:6443/healthz\": read tcp 10.217.0.2:60420->10.217.0.56:6443: read: connection reset by peer" start-of-body= Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.465810 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.56:6443/healthz\": read tcp 10.217.0.2:60420->10.217.0.56:6443: read: connection reset by peer" Dec 04 17:32:39 crc kubenswrapper[4631]: E1204 17:32:39.466279 4631 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/events\": dial tcp 38.102.83.194:6443: connect: connection refused" event=< Dec 04 17:32:39 crc kubenswrapper[4631]: &Event{ObjectMeta:{oauth-openshift-cc7989dc6-mvfhk.187e13837edbd907 openshift-authentication 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-authentication,Name:oauth-openshift-cc7989dc6-mvfhk,UID:0e227500-d232-4c7b-b78e-24f66d88f82a,APIVersion:v1,ResourceVersion:29295,FieldPath:spec.containers{oauth-openshift},},Reason:ProbeError,Message:Readiness probe error: Get "https://10.217.0.56:6443/healthz": read tcp 10.217.0.2:60420->10.217.0.56:6443: read: connection reset by peer Dec 04 17:32:39 crc kubenswrapper[4631]: body: Dec 04 17:32:39 crc kubenswrapper[4631]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-04 17:32:39.465793799 +0000 UTC m=+289.498035797,LastTimestamp:2025-12-04 17:32:39.465793799 +0000 UTC m=+289.498035797,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Dec 04 17:32:39 crc kubenswrapper[4631]: > Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.481501 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8kcm9\" (UniqueName: \"kubernetes.io/projected/7c6c775a-ae4c-4682-97fd-5f9e4457f8fd-kube-api-access-8kcm9\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.481526 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c6c775a-ae4c-4682-97fd-5f9e4457f8fd-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.488597 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c6c775a-ae4c-4682-97fd-5f9e4457f8fd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" (UID: "7c6c775a-ae4c-4682-97fd-5f9e4457f8fd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.582992 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c6c775a-ae4c-4682-97fd-5f9e4457f8fd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.625601 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.626527 4631 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad" exitCode=0 Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.626672 4631 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a" exitCode=0 Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.626750 4631 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e" exitCode=0 Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.627165 4631 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f" exitCode=2 Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.630324 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-cc7989dc6-mvfhk_0e227500-d232-4c7b-b78e-24f66d88f82a/oauth-openshift/0.log" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.630615 4631 generic.go:334] "Generic (PLEG): container finished" podID="0e227500-d232-4c7b-b78e-24f66d88f82a" containerID="96bbb66d0af03636e5c22c9cdce8fc0906f21172253adaca855716006c5d273a" exitCode=255 Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.630733 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" event={"ID":"0e227500-d232-4c7b-b78e-24f66d88f82a","Type":"ContainerDied","Data":"96bbb66d0af03636e5c22c9cdce8fc0906f21172253adaca855716006c5d273a"} Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.631483 4631 scope.go:117] "RemoveContainer" containerID="96bbb66d0af03636e5c22c9cdce8fc0906f21172253adaca855716006c5d273a" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.632288 4631 status_manager.go:851] "Failed to get status for pod" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" pod="openshift-marketplace/community-operators-bvzck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bvzck\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.632682 4631 status_manager.go:851] "Failed to get status for pod" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-cc7989dc6-mvfhk\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.632911 4631 status_manager.go:851] "Failed to get status for pod" 
podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" pod="openshift-marketplace/redhat-operators-bxvz7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-bxvz7\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.633309 4631 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.635925 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bxvz7" event={"ID":"7c6c775a-ae4c-4682-97fd-5f9e4457f8fd","Type":"ContainerDied","Data":"479421fb35d9c0198e7990411b9f8af223055d019b9ca1f1e9f2b32521560e69"} Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.635964 4631 scope.go:117] "RemoveContainer" containerID="c54d41a82213ea8f32b326c3427b5324d33ebf29a25561b5d5afcb04fc7be8d4" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.635974 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bxvz7" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.638065 4631 generic.go:334] "Generic (PLEG): container finished" podID="8fc9fdca-bb86-41d3-a800-2996fdeea0fd" containerID="c8febd5b53f3bd77cf7bc8ff282eb5f25fc8ccb617017a7420ca00670fcde94c" exitCode=0 Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.638140 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8fc9fdca-bb86-41d3-a800-2996fdeea0fd","Type":"ContainerDied","Data":"c8febd5b53f3bd77cf7bc8ff282eb5f25fc8ccb617017a7420ca00670fcde94c"} Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.639651 4631 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.640232 4631 status_manager.go:851] "Failed to get status for pod" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" pod="openshift-marketplace/community-operators-bvzck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bvzck\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.640712 4631 status_manager.go:851] "Failed to get status for pod" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-cc7989dc6-mvfhk\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.640950 4631 status_manager.go:851] "Failed to get status for pod" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" pod="openshift-marketplace/redhat-operators-bxvz7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-bxvz7\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:39 crc kubenswrapper[4631]: 
I1204 17:32:39.641265 4631 status_manager.go:851] "Failed to get status for pod" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" pod="openshift-marketplace/redhat-operators-bxvz7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-bxvz7\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.641600 4631 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.641775 4631 status_manager.go:851] "Failed to get status for pod" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" pod="openshift-marketplace/community-operators-bvzck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bvzck\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.642021 4631 status_manager.go:851] "Failed to get status for pod" podUID="8fc9fdca-bb86-41d3-a800-2996fdeea0fd" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.642448 4631 status_manager.go:851] "Failed to get status for pod" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-cc7989dc6-mvfhk\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.649633 4631 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.649897 4631 status_manager.go:851] "Failed to get status for pod" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" pod="openshift-marketplace/community-operators-bvzck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bvzck\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.650069 4631 status_manager.go:851] "Failed to get status for pod" podUID="8fc9fdca-bb86-41d3-a800-2996fdeea0fd" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.650234 4631 status_manager.go:851] "Failed to get status for pod" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-cc7989dc6-mvfhk\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:39 crc 
kubenswrapper[4631]: I1204 17:32:39.650411 4631 status_manager.go:851] "Failed to get status for pod" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" pod="openshift-marketplace/redhat-operators-bxvz7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-bxvz7\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.660308 4631 scope.go:117] "RemoveContainer" containerID="bfc5b219c97a31400861a947a35027550f0d57badd985180391465bcf6b1f63d" Dec 04 17:32:39 crc kubenswrapper[4631]: I1204 17:32:39.695321 4631 scope.go:117] "RemoveContainer" containerID="a8c8c6cc3f883c80dbff5171aeb66f2aef48a8e2faf890d8e222831c1a37f736" Dec 04 17:32:40 crc kubenswrapper[4631]: I1204 17:32:40.242427 4631 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:40 crc kubenswrapper[4631]: I1204 17:32:40.243520 4631 status_manager.go:851] "Failed to get status for pod" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" pod="openshift-marketplace/community-operators-bvzck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bvzck\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:40 crc kubenswrapper[4631]: I1204 17:32:40.244056 4631 status_manager.go:851] "Failed to get status for pod" podUID="8fc9fdca-bb86-41d3-a800-2996fdeea0fd" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:40 crc kubenswrapper[4631]: I1204 17:32:40.244346 4631 status_manager.go:851] "Failed to get status for pod" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-cc7989dc6-mvfhk\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:40 crc kubenswrapper[4631]: I1204 17:32:40.244623 4631 status_manager.go:851] "Failed to get status for pod" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" pod="openshift-marketplace/redhat-operators-bxvz7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-bxvz7\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:40 crc kubenswrapper[4631]: I1204 17:32:40.646870 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-cc7989dc6-mvfhk_0e227500-d232-4c7b-b78e-24f66d88f82a/oauth-openshift/1.log" Dec 04 17:32:40 crc kubenswrapper[4631]: I1204 17:32:40.649112 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-cc7989dc6-mvfhk_0e227500-d232-4c7b-b78e-24f66d88f82a/oauth-openshift/0.log" Dec 04 17:32:40 crc kubenswrapper[4631]: I1204 17:32:40.649164 4631 generic.go:334] "Generic (PLEG): container finished" podID="0e227500-d232-4c7b-b78e-24f66d88f82a" containerID="b1756f83cc992c6777526ef7ef6506e16324e66f6328138510b8937bac4e46a4" exitCode=255 Dec 04 17:32:40 crc kubenswrapper[4631]: I1204 17:32:40.649224 4631 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" event={"ID":"0e227500-d232-4c7b-b78e-24f66d88f82a","Type":"ContainerDied","Data":"b1756f83cc992c6777526ef7ef6506e16324e66f6328138510b8937bac4e46a4"} Dec 04 17:32:40 crc kubenswrapper[4631]: I1204 17:32:40.649258 4631 scope.go:117] "RemoveContainer" containerID="96bbb66d0af03636e5c22c9cdce8fc0906f21172253adaca855716006c5d273a" Dec 04 17:32:40 crc kubenswrapper[4631]: I1204 17:32:40.649798 4631 scope.go:117] "RemoveContainer" containerID="b1756f83cc992c6777526ef7ef6506e16324e66f6328138510b8937bac4e46a4" Dec 04 17:32:40 crc kubenswrapper[4631]: I1204 17:32:40.650179 4631 status_manager.go:851] "Failed to get status for pod" podUID="8fc9fdca-bb86-41d3-a800-2996fdeea0fd" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:40 crc kubenswrapper[4631]: E1204 17:32:40.650328 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 10s restarting failed container=oauth-openshift pod=oauth-openshift-cc7989dc6-mvfhk_openshift-authentication(0e227500-d232-4c7b-b78e-24f66d88f82a)\"" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" Dec 04 17:32:40 crc kubenswrapper[4631]: I1204 17:32:40.653191 4631 status_manager.go:851] "Failed to get status for pod" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" pod="openshift-marketplace/community-operators-bvzck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bvzck\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:40 crc kubenswrapper[4631]: I1204 17:32:40.653906 4631 status_manager.go:851] "Failed to get status for pod" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-cc7989dc6-mvfhk\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:40 crc kubenswrapper[4631]: I1204 17:32:40.655299 4631 status_manager.go:851] "Failed to get status for pod" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" pod="openshift-marketplace/redhat-operators-bxvz7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-bxvz7\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.006001 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.007685 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.008444 4631 status_manager.go:851] "Failed to get status for pod" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" pod="openshift-marketplace/community-operators-bvzck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bvzck\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.009356 4631 status_manager.go:851] "Failed to get status for pod" podUID="8fc9fdca-bb86-41d3-a800-2996fdeea0fd" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.009936 4631 status_manager.go:851] "Failed to get status for pod" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-cc7989dc6-mvfhk\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.010423 4631 status_manager.go:851] "Failed to get status for pod" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" pod="openshift-marketplace/redhat-operators-bxvz7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-bxvz7\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.010839 4631 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.012093 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.012683 4631 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.013087 4631 status_manager.go:851] "Failed to get status for pod" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" pod="openshift-marketplace/community-operators-bvzck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bvzck\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.013386 4631 status_manager.go:851] "Failed to get status for pod" podUID="8fc9fdca-bb86-41d3-a800-2996fdeea0fd" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.013698 4631 status_manager.go:851] "Failed to get status for pod" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-cc7989dc6-mvfhk\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.014128 4631 status_manager.go:851] "Failed to get status for pod" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" pod="openshift-marketplace/redhat-operators-bxvz7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-bxvz7\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.103184 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8fc9fdca-bb86-41d3-a800-2996fdeea0fd-var-lock\") pod \"8fc9fdca-bb86-41d3-a800-2996fdeea0fd\" (UID: \"8fc9fdca-bb86-41d3-a800-2996fdeea0fd\") " Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.103318 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.103366 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8fc9fdca-bb86-41d3-a800-2996fdeea0fd-kubelet-dir\") pod \"8fc9fdca-bb86-41d3-a800-2996fdeea0fd\" (UID: \"8fc9fdca-bb86-41d3-a800-2996fdeea0fd\") " Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.103429 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8fc9fdca-bb86-41d3-a800-2996fdeea0fd-kube-api-access\") pod \"8fc9fdca-bb86-41d3-a800-2996fdeea0fd\" (UID: \"8fc9fdca-bb86-41d3-a800-2996fdeea0fd\") " Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.103492 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.103512 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.103790 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.103861 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8fc9fdca-bb86-41d3-a800-2996fdeea0fd-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "8fc9fdca-bb86-41d3-a800-2996fdeea0fd" (UID: "8fc9fdca-bb86-41d3-a800-2996fdeea0fd"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.103963 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.103985 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8fc9fdca-bb86-41d3-a800-2996fdeea0fd-var-lock" (OuterVolumeSpecName: "var-lock") pod "8fc9fdca-bb86-41d3-a800-2996fdeea0fd" (UID: "8fc9fdca-bb86-41d3-a800-2996fdeea0fd"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.104003 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.111251 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fc9fdca-bb86-41d3-a800-2996fdeea0fd-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "8fc9fdca-bb86-41d3-a800-2996fdeea0fd" (UID: "8fc9fdca-bb86-41d3-a800-2996fdeea0fd"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.204859 4631 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.204900 4631 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8fc9fdca-bb86-41d3-a800-2996fdeea0fd-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.204913 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8fc9fdca-bb86-41d3-a800-2996fdeea0fd-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.204924 4631 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.204932 4631 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.204940 4631 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8fc9fdca-bb86-41d3-a800-2996fdeea0fd-var-lock\") on node \"crc\" DevicePath \"\"" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.689977 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.692283 4631 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284" exitCode=0 Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.692431 4631 scope.go:117] "RemoveContainer" containerID="89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.692449 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.697615 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-cc7989dc6-mvfhk_0e227500-d232-4c7b-b78e-24f66d88f82a/oauth-openshift/1.log" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.698254 4631 scope.go:117] "RemoveContainer" containerID="b1756f83cc992c6777526ef7ef6506e16324e66f6328138510b8937bac4e46a4" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.698330 4631 status_manager.go:851] "Failed to get status for pod" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-cc7989dc6-mvfhk\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: E1204 17:32:41.698685 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 10s restarting failed container=oauth-openshift pod=oauth-openshift-cc7989dc6-mvfhk_openshift-authentication(0e227500-d232-4c7b-b78e-24f66d88f82a)\"" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.698729 4631 status_manager.go:851] "Failed to get status for pod" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" pod="openshift-marketplace/redhat-operators-bxvz7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-bxvz7\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.698986 4631 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.699201 4631 status_manager.go:851] "Failed to get status for pod" podUID="8fc9fdca-bb86-41d3-a800-2996fdeea0fd" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.699556 4631 status_manager.go:851] "Failed to get status for pod" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" pod="openshift-marketplace/community-operators-bvzck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bvzck\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.700805 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8fc9fdca-bb86-41d3-a800-2996fdeea0fd","Type":"ContainerDied","Data":"bb89800d381992364bb060cdb6837a400797f88899febaba1be5be796e3f5bbc"} Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.700836 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb89800d381992364bb060cdb6837a400797f88899febaba1be5be796e3f5bbc" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.700893 4631 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.713630 4631 status_manager.go:851] "Failed to get status for pod" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" pod="openshift-marketplace/community-operators-bvzck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bvzck\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.713910 4631 status_manager.go:851] "Failed to get status for pod" podUID="8fc9fdca-bb86-41d3-a800-2996fdeea0fd" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.714055 4631 status_manager.go:851] "Failed to get status for pod" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-cc7989dc6-mvfhk\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.714213 4631 status_manager.go:851] "Failed to get status for pod" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" pod="openshift-marketplace/redhat-operators-bxvz7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-bxvz7\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.714346 4631 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.718735 4631 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.718902 4631 status_manager.go:851] "Failed to get status for pod" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" pod="openshift-marketplace/community-operators-bvzck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bvzck\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.719079 4631 status_manager.go:851] "Failed to get status for pod" podUID="8fc9fdca-bb86-41d3-a800-2996fdeea0fd" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.719243 4631 status_manager.go:851] "Failed to get status for pod" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-cc7989dc6-mvfhk\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.719443 4631 status_manager.go:851] "Failed to get status for pod" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" pod="openshift-marketplace/redhat-operators-bxvz7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-bxvz7\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.727834 4631 scope.go:117] "RemoveContainer" containerID="e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.753914 4631 scope.go:117] "RemoveContainer" containerID="a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.767152 4631 scope.go:117] "RemoveContainer" containerID="20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.782145 4631 scope.go:117] "RemoveContainer" containerID="7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.806786 4631 scope.go:117] "RemoveContainer" containerID="22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.840142 4631 scope.go:117] "RemoveContainer" containerID="89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad" Dec 04 17:32:41 crc kubenswrapper[4631]: E1204 17:32:41.840915 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\": container with ID starting with 89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad not found: ID does not exist" containerID="89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.840972 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad"} err="failed to get container status \"89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\": rpc error: code = NotFound desc = could not find container \"89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad\": container with ID starting with 89450e4853cb61d14fe662716950b71673d5e7d09ec5f33bf8d1229726099fad not found: ID does not exist" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.841006 4631 scope.go:117] "RemoveContainer" containerID="e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a" Dec 04 17:32:41 crc kubenswrapper[4631]: E1204 17:32:41.841409 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\": container with ID starting with e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a not found: ID does not exist" containerID="e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.841448 4631 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a"} err="failed to get container status \"e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\": rpc error: code = NotFound desc = could not find container \"e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a\": container with ID starting with e0b9486b35e39b109547880ad518f0aa287eb4c19bdd6a7754d953d94b93506a not found: ID does not exist" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.841493 4631 scope.go:117] "RemoveContainer" containerID="a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e" Dec 04 17:32:41 crc kubenswrapper[4631]: E1204 17:32:41.841986 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\": container with ID starting with a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e not found: ID does not exist" containerID="a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.842029 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e"} err="failed to get container status \"a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\": rpc error: code = NotFound desc = could not find container \"a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e\": container with ID starting with a6420a7e1cd5284134dc3b28831de325238b487737b43de0a7719d816e9cd41e not found: ID does not exist" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.842051 4631 scope.go:117] "RemoveContainer" containerID="20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f" Dec 04 17:32:41 crc kubenswrapper[4631]: E1204 17:32:41.842527 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\": container with ID starting with 20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f not found: ID does not exist" containerID="20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.842555 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f"} err="failed to get container status \"20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\": rpc error: code = NotFound desc = could not find container \"20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f\": container with ID starting with 20bec2daa5f9c4b9eb2c0b9b337e2fb5847d73b1f09a3b7073360ac5df43366f not found: ID does not exist" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.842571 4631 scope.go:117] "RemoveContainer" containerID="7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284" Dec 04 17:32:41 crc kubenswrapper[4631]: E1204 17:32:41.842893 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\": container with ID starting with 7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284 not found: ID does not exist" 
containerID="7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.842997 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284"} err="failed to get container status \"7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\": rpc error: code = NotFound desc = could not find container \"7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284\": container with ID starting with 7a26d7a850f4b6ff7681110cd01153634478703b8cd189fd9971138fa0079284 not found: ID does not exist" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.843082 4631 scope.go:117] "RemoveContainer" containerID="22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52" Dec 04 17:32:41 crc kubenswrapper[4631]: E1204 17:32:41.843476 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\": container with ID starting with 22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52 not found: ID does not exist" containerID="22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52" Dec 04 17:32:41 crc kubenswrapper[4631]: I1204 17:32:41.843507 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52"} err="failed to get container status \"22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\": rpc error: code = NotFound desc = could not find container \"22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52\": container with ID starting with 22be7d173b2ad10f1eb4f6edb95b860b42a1f54fd90cdfa33552bc0e8f6f6b52 not found: ID does not exist" Dec 04 17:32:42 crc kubenswrapper[4631]: I1204 17:32:42.245801 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 04 17:32:43 crc kubenswrapper[4631]: E1204 17:32:43.563222 4631 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.194:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 17:32:43 crc kubenswrapper[4631]: I1204 17:32:43.564448 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 17:32:43 crc kubenswrapper[4631]: I1204 17:32:43.720118 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"9465732ad3649d34afc2a9fdfa3ee96d8115ddf5b8c55aa08ce4739e0f5fb18f"} Dec 04 17:32:44 crc kubenswrapper[4631]: E1204 17:32:44.481615 4631 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/events\": dial tcp 38.102.83.194:6443: connect: connection refused" event=< Dec 04 17:32:44 crc kubenswrapper[4631]: &Event{ObjectMeta:{oauth-openshift-cc7989dc6-mvfhk.187e13837edbd907 openshift-authentication 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-authentication,Name:oauth-openshift-cc7989dc6-mvfhk,UID:0e227500-d232-4c7b-b78e-24f66d88f82a,APIVersion:v1,ResourceVersion:29295,FieldPath:spec.containers{oauth-openshift},},Reason:ProbeError,Message:Readiness probe error: Get "https://10.217.0.56:6443/healthz": read tcp 10.217.0.2:60420->10.217.0.56:6443: read: connection reset by peer Dec 04 17:32:44 crc kubenswrapper[4631]: body: Dec 04 17:32:44 crc kubenswrapper[4631]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-04 17:32:39.465793799 +0000 UTC m=+289.498035797,LastTimestamp:2025-12-04 17:32:39.465793799 +0000 UTC m=+289.498035797,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Dec 04 17:32:44 crc kubenswrapper[4631]: > Dec 04 17:32:44 crc kubenswrapper[4631]: I1204 17:32:44.726950 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"10eae845693d459790c8476b105982ae870ae89a3d6d7379ac06d36483738b8a"} Dec 04 17:32:44 crc kubenswrapper[4631]: I1204 17:32:44.727662 4631 status_manager.go:851] "Failed to get status for pod" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" pod="openshift-marketplace/community-operators-bvzck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bvzck\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:44 crc kubenswrapper[4631]: E1204 17:32:44.727677 4631 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.194:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 17:32:44 crc kubenswrapper[4631]: I1204 17:32:44.727871 4631 status_manager.go:851] "Failed to get status for pod" podUID="8fc9fdca-bb86-41d3-a800-2996fdeea0fd" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:44 crc kubenswrapper[4631]: I1204 17:32:44.728023 4631 status_manager.go:851] "Failed to get status for pod" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-cc7989dc6-mvfhk\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:44 crc kubenswrapper[4631]: I1204 17:32:44.728843 4631 status_manager.go:851] "Failed to get status for pod" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" pod="openshift-marketplace/redhat-operators-bxvz7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-bxvz7\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:45 crc kubenswrapper[4631]: E1204 17:32:45.734285 4631 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.194:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 04 17:32:47 crc kubenswrapper[4631]: I1204 17:32:47.239426 4631 status_manager.go:851] "Failed to get status for pod" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" pod="openshift-marketplace/community-operators-bvzck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bvzck\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:47 crc kubenswrapper[4631]: I1204 17:32:47.239729 4631 status_manager.go:851] "Failed to get status for pod" podUID="8fc9fdca-bb86-41d3-a800-2996fdeea0fd" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:47 crc kubenswrapper[4631]: I1204 17:32:47.240015 4631 status_manager.go:851] "Failed to get status for pod" podUID="5cb73c44-e995-4e73-9bd3-422c00633ddf" pod="openshift-marketplace/certified-operators-l85pg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-l85pg\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:47 crc kubenswrapper[4631]: I1204 17:32:47.240521 4631 status_manager.go:851] "Failed to get status for pod" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-cc7989dc6-mvfhk\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:47 crc kubenswrapper[4631]: I1204 17:32:47.240732 4631 status_manager.go:851] "Failed to get status for pod" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" pod="openshift-marketplace/redhat-operators-bxvz7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-bxvz7\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:47 crc kubenswrapper[4631]: I1204 17:32:47.311362 4631 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:47 crc kubenswrapper[4631]: I1204 17:32:47.311570 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:32:47 crc kubenswrapper[4631]: I1204 17:32:47.312532 4631 scope.go:117] "RemoveContainer" containerID="b1756f83cc992c6777526ef7ef6506e16324e66f6328138510b8937bac4e46a4" Dec 04 17:32:47 crc kubenswrapper[4631]: E1204 17:32:47.312851 4631 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 10s restarting failed container=oauth-openshift pod=oauth-openshift-cc7989dc6-mvfhk_openshift-authentication(0e227500-d232-4c7b-b78e-24f66d88f82a)\"" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" Dec 04 17:32:47 crc kubenswrapper[4631]: E1204 17:32:47.711504 4631 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:47 crc kubenswrapper[4631]: E1204 17:32:47.712081 4631 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:47 crc kubenswrapper[4631]: E1204 17:32:47.712619 4631 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:47 crc kubenswrapper[4631]: E1204 17:32:47.713088 4631 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:47 crc kubenswrapper[4631]: E1204 17:32:47.713614 4631 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:47 crc kubenswrapper[4631]: I1204 17:32:47.713690 4631 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 04 17:32:47 crc kubenswrapper[4631]: E1204 17:32:47.714084 4631 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" interval="200ms" Dec 04 17:32:47 crc kubenswrapper[4631]: I1204 17:32:47.746174 4631 scope.go:117] "RemoveContainer" containerID="b1756f83cc992c6777526ef7ef6506e16324e66f6328138510b8937bac4e46a4" Dec 04 17:32:47 crc kubenswrapper[4631]: E1204 17:32:47.746588 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 10s restarting failed container=oauth-openshift pod=oauth-openshift-cc7989dc6-mvfhk_openshift-authentication(0e227500-d232-4c7b-b78e-24f66d88f82a)\"" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" Dec 04 17:32:47 crc kubenswrapper[4631]: E1204 17:32:47.915655 4631 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" interval="400ms" Dec 04 17:32:48 crc kubenswrapper[4631]: E1204 17:32:48.316213 4631 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" interval="800ms" Dec 04 17:32:48 crc kubenswrapper[4631]: I1204 17:32:48.753306 4631 generic.go:334] "Generic (PLEG): container finished" podID="5cb73c44-e995-4e73-9bd3-422c00633ddf" containerID="3b8f8d6904928b5ed8ff52dc263e96b54f8c12021d69df5f307ca2441ec5b868" exitCode=0 Dec 04 17:32:48 crc kubenswrapper[4631]: I1204 17:32:48.753354 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l85pg" event={"ID":"5cb73c44-e995-4e73-9bd3-422c00633ddf","Type":"ContainerDied","Data":"3b8f8d6904928b5ed8ff52dc263e96b54f8c12021d69df5f307ca2441ec5b868"} Dec 04 17:32:48 crc kubenswrapper[4631]: I1204 17:32:48.755131 4631 status_manager.go:851] "Failed to get status for pod" podUID="8fc9fdca-bb86-41d3-a800-2996fdeea0fd" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:48 crc kubenswrapper[4631]: I1204 17:32:48.755785 4631 status_manager.go:851] "Failed to get status for pod" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" pod="openshift-marketplace/community-operators-bvzck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bvzck\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:48 crc kubenswrapper[4631]: I1204 17:32:48.756405 4631 status_manager.go:851] "Failed to get status for pod" podUID="5cb73c44-e995-4e73-9bd3-422c00633ddf" pod="openshift-marketplace/certified-operators-l85pg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-l85pg\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:48 crc kubenswrapper[4631]: I1204 17:32:48.757668 4631 status_manager.go:851] "Failed to get status for pod" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-cc7989dc6-mvfhk\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:48 crc kubenswrapper[4631]: I1204 17:32:48.758101 4631 status_manager.go:851] "Failed to get status for pod" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" pod="openshift-marketplace/redhat-operators-bxvz7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-bxvz7\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:48 crc kubenswrapper[4631]: E1204 17:32:48.947791 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:32:48Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:32:48Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:32:48Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-04T17:32:48Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:76fa3a070e2d266753455b1f5cdc0275f55a5492ac0eb2a953655259ed14e122\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:d7d1951816ca8fc0605d2e29f392d6b1ef65d09ea9454f3c3f9972568e0eb00b\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1610200900},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:18451dc6c69687adf6c276ed6a28aa8c7ab6e963bf16e3be5074938ed0a05bf0\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:3451c27f73d0c1e1ee92e475fc2228a4ddacfef2189ed3613dcffd2fdedc88e3\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1207237969},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:9932555c9cde7a4fafbb069ced4e1874d584d0161ef1681710ba649966fa3aed\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:eb617b21d6eac1158f0772431048128991ac63dea611ddc67d9594df748921e7\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1201438029},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:e8990432556acad31519b1a73ec32f32d27c2034cf9e5cc4db8980efc7331594\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:ebe9f523f5c211a3a0f2570331dddcd5be15b12c1fecd9b8b121f881bfaad029\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1129027903},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\
\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}],\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for 
node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:48 crc kubenswrapper[4631]: E1204 17:32:48.948631 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:48 crc kubenswrapper[4631]: E1204 17:32:48.949033 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:48 crc kubenswrapper[4631]: E1204 17:32:48.949424 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:48 crc kubenswrapper[4631]: E1204 17:32:48.949738 4631 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:48 crc kubenswrapper[4631]: E1204 17:32:48.949762 4631 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 04 17:32:49 crc kubenswrapper[4631]: E1204 17:32:49.117240 4631 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" interval="1.6s" Dec 04 17:32:49 crc kubenswrapper[4631]: I1204 17:32:49.764538 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l85pg" event={"ID":"5cb73c44-e995-4e73-9bd3-422c00633ddf","Type":"ContainerStarted","Data":"a13e5c850faba8465998f86e2749c956e15d3207ae916b435edd13a54ae9e360"} Dec 04 17:32:49 crc kubenswrapper[4631]: I1204 17:32:49.766087 4631 status_manager.go:851] "Failed to get status for pod" podUID="5cb73c44-e995-4e73-9bd3-422c00633ddf" pod="openshift-marketplace/certified-operators-l85pg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-l85pg\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:49 crc kubenswrapper[4631]: I1204 17:32:49.766901 4631 status_manager.go:851] "Failed to get status for pod" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-cc7989dc6-mvfhk\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:49 crc kubenswrapper[4631]: I1204 17:32:49.767533 4631 status_manager.go:851] "Failed to get status for pod" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" pod="openshift-marketplace/redhat-operators-bxvz7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-bxvz7\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:49 crc kubenswrapper[4631]: I1204 17:32:49.768040 4631 status_manager.go:851] "Failed to get status for pod" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" 
pod="openshift-marketplace/community-operators-bvzck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bvzck\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:49 crc kubenswrapper[4631]: I1204 17:32:49.768530 4631 status_manager.go:851] "Failed to get status for pod" podUID="8fc9fdca-bb86-41d3-a800-2996fdeea0fd" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:50 crc kubenswrapper[4631]: I1204 17:32:50.137794 4631 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Dec 04 17:32:50 crc kubenswrapper[4631]: I1204 17:32:50.210133 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-l85pg" Dec 04 17:32:50 crc kubenswrapper[4631]: I1204 17:32:50.210404 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-l85pg" Dec 04 17:32:50 crc kubenswrapper[4631]: I1204 17:32:50.249418 4631 status_manager.go:851] "Failed to get status for pod" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" pod="openshift-marketplace/community-operators-bvzck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bvzck\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:50 crc kubenswrapper[4631]: I1204 17:32:50.249868 4631 status_manager.go:851] "Failed to get status for pod" podUID="8fc9fdca-bb86-41d3-a800-2996fdeea0fd" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:50 crc kubenswrapper[4631]: I1204 17:32:50.252791 4631 status_manager.go:851] "Failed to get status for pod" podUID="5cb73c44-e995-4e73-9bd3-422c00633ddf" pod="openshift-marketplace/certified-operators-l85pg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-l85pg\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:50 crc kubenswrapper[4631]: I1204 17:32:50.253010 4631 status_manager.go:851] "Failed to get status for pod" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-cc7989dc6-mvfhk\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:50 crc kubenswrapper[4631]: I1204 17:32:50.253455 4631 status_manager.go:851] "Failed to get status for pod" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" pod="openshift-marketplace/redhat-operators-bxvz7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-bxvz7\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:50 crc kubenswrapper[4631]: E1204 17:32:50.718157 4631 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.194:6443: connect: connection refused" interval="3.2s" Dec 04 17:32:51 crc kubenswrapper[4631]: I1204 
17:32:51.238547 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 04 17:32:51 crc kubenswrapper[4631]: I1204 17:32:51.240332 4631 status_manager.go:851] "Failed to get status for pod" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" pod="openshift-marketplace/redhat-operators-bxvz7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-bxvz7\": dial tcp 38.102.83.194:6443: connect: connection refused"
Dec 04 17:32:51 crc kubenswrapper[4631]: I1204 17:32:51.240805 4631 status_manager.go:851] "Failed to get status for pod" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" pod="openshift-marketplace/community-operators-bvzck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bvzck\": dial tcp 38.102.83.194:6443: connect: connection refused"
Dec 04 17:32:51 crc kubenswrapper[4631]: I1204 17:32:51.241115 4631 status_manager.go:851] "Failed to get status for pod" podUID="8fc9fdca-bb86-41d3-a800-2996fdeea0fd" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.194:6443: connect: connection refused"
Dec 04 17:32:51 crc kubenswrapper[4631]: I1204 17:32:51.241784 4631 status_manager.go:851] "Failed to get status for pod" podUID="5cb73c44-e995-4e73-9bd3-422c00633ddf" pod="openshift-marketplace/certified-operators-l85pg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-l85pg\": dial tcp 38.102.83.194:6443: connect: connection refused"
Dec 04 17:32:51 crc kubenswrapper[4631]: I1204 17:32:51.242645 4631 status_manager.go:851] "Failed to get status for pod" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-cc7989dc6-mvfhk\": dial tcp 38.102.83.194:6443: connect: connection refused"
Dec 04 17:32:51 crc kubenswrapper[4631]: I1204 17:32:51.259161 4631 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cca995e5-9ce2-4996-b131-60c489ff721e"
Dec 04 17:32:51 crc kubenswrapper[4631]: I1204 17:32:51.259209 4631 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cca995e5-9ce2-4996-b131-60c489ff721e"
Dec 04 17:32:51 crc kubenswrapper[4631]: E1204 17:32:51.259835 4631 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
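
kube-apiserver-crc is a static pod, so its API object is a mirror pod that kubelet itself creates and manages; "Deleting a mirror pod" is therefore an ordinary Pods API DELETE, which is why it fails with connection refused while the very API server being mirrored is down. A hedged client-go sketch of that call (the kubeconfig path is an assumption; the namespace and pod name are taken from the log):

package main

import (
    "context"
    "fmt"

    metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    "k8s.io/client-go/kubernetes"
    "k8s.io/client-go/tools/clientcmd"
)

func main() {
    // Assumed kubeconfig location; a stale mirror pod is removed with a
    // plain DELETE against its namespace and name.
    cfg, err := clientcmd.BuildConfigFromFlags("", "/root/.kube/config")
    if err != nil {
        panic(err)
    }
    client, err := kubernetes.NewForConfig(cfg)
    if err != nil {
        panic(err)
    }
    err = client.CoreV1().Pods("openshift-kube-apiserver").
        Delete(context.TODO(), "kube-apiserver-crc", metav1.DeleteOptions{})
    if err != nil {
        // With the API server down this fails exactly like the log line:
        // "dial tcp ...:6443: connect: connection refused".
        fmt.Println("failed deleting mirror pod:", err)
    }
}
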
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:32:51 crc kubenswrapper[4631]: I1204 17:32:51.262120 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-l85pg" podUID="5cb73c44-e995-4e73-9bd3-422c00633ddf" containerName="registry-server" probeResult="failure" output=< Dec 04 17:32:51 crc kubenswrapper[4631]: timeout: failed to connect service ":50051" within 1s Dec 04 17:32:51 crc kubenswrapper[4631]: > Dec 04 17:32:51 crc kubenswrapper[4631]: W1204 17:32:51.298763 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-bb4a329ae9dae336eb9bc34cc4c0c98cb05b1d6f6eaefadcd8e5b36db0cb1de8 WatchSource:0}: Error finding container bb4a329ae9dae336eb9bc34cc4c0c98cb05b1d6f6eaefadcd8e5b36db0cb1de8: Status 404 returned error can't find the container with id bb4a329ae9dae336eb9bc34cc4c0c98cb05b1d6f6eaefadcd8e5b36db0cb1de8 Dec 04 17:32:51 crc kubenswrapper[4631]: I1204 17:32:51.776410 4631 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="d9570d4463873c19184a792a798765dbfa6479ab9dc41619c7c9c4e22d95e266" exitCode=0 Dec 04 17:32:51 crc kubenswrapper[4631]: I1204 17:32:51.776483 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"d9570d4463873c19184a792a798765dbfa6479ab9dc41619c7c9c4e22d95e266"} Dec 04 17:32:51 crc kubenswrapper[4631]: I1204 17:32:51.776859 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"bb4a329ae9dae336eb9bc34cc4c0c98cb05b1d6f6eaefadcd8e5b36db0cb1de8"} Dec 04 17:32:51 crc kubenswrapper[4631]: I1204 17:32:51.777256 4631 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cca995e5-9ce2-4996-b131-60c489ff721e" Dec 04 17:32:51 crc kubenswrapper[4631]: I1204 17:32:51.777276 4631 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cca995e5-9ce2-4996-b131-60c489ff721e" Dec 04 17:32:51 crc kubenswrapper[4631]: I1204 17:32:51.777771 4631 status_manager.go:851] "Failed to get status for pod" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" pod="openshift-marketplace/community-operators-bvzck" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bvzck\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:51 crc kubenswrapper[4631]: E1204 17:32:51.777788 4631 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:32:51 crc kubenswrapper[4631]: I1204 17:32:51.778043 4631 status_manager.go:851] "Failed to get status for pod" podUID="8fc9fdca-bb86-41d3-a800-2996fdeea0fd" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:51 crc kubenswrapper[4631]: I1204 17:32:51.778322 4631 status_manager.go:851] "Failed to 
get status for pod" podUID="5cb73c44-e995-4e73-9bd3-422c00633ddf" pod="openshift-marketplace/certified-operators-l85pg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-l85pg\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:51 crc kubenswrapper[4631]: I1204 17:32:51.778654 4631 status_manager.go:851] "Failed to get status for pod" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-cc7989dc6-mvfhk\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:51 crc kubenswrapper[4631]: I1204 17:32:51.779066 4631 status_manager.go:851] "Failed to get status for pod" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" pod="openshift-marketplace/redhat-operators-bxvz7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-bxvz7\": dial tcp 38.102.83.194:6443: connect: connection refused" Dec 04 17:32:52 crc kubenswrapper[4631]: I1204 17:32:52.793612 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"0bcfc9fc4e2d6e108534673f6f4668c5dd257dc32c92d399c8ec0b0153df7062"} Dec 04 17:32:52 crc kubenswrapper[4631]: I1204 17:32:52.793988 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"914782467edef6392c6058e65df2bb03a3c41ffa23203acff73b108a8e2efb03"} Dec 04 17:32:52 crc kubenswrapper[4631]: I1204 17:32:52.794000 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"4f4cd5f9783b7faf02d5dfee32cdb141eb647f88a2ad51f6205c8f551aa5a39e"} Dec 04 17:32:52 crc kubenswrapper[4631]: I1204 17:32:52.794010 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"a2a6365b588b8c8a9ba352ae1d517b1e786c56d20fee7cf62071b04f800638f9"} Dec 04 17:32:53 crc kubenswrapper[4631]: I1204 17:32:53.800859 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 04 17:32:53 crc kubenswrapper[4631]: I1204 17:32:53.800918 4631 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a" exitCode=1 Dec 04 17:32:53 crc kubenswrapper[4631]: I1204 17:32:53.800981 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a"} Dec 04 17:32:53 crc kubenswrapper[4631]: I1204 17:32:53.801537 4631 scope.go:117] "RemoveContainer" containerID="1105dd5b358cf39c7f9bcbe5fe6590c8c38eaf3f6db5adb4f37d90881fdfcd6a" Dec 04 17:32:53 crc kubenswrapper[4631]: I1204 17:32:53.804140 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"eede498136b19f4eff8138d69e5b6dd175bc55f09dc5d2a5dad5ed7971212a0d"} Dec 04 17:32:53 crc kubenswrapper[4631]: I1204 17:32:53.804306 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:32:53 crc kubenswrapper[4631]: I1204 17:32:53.804425 4631 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cca995e5-9ce2-4996-b131-60c489ff721e" Dec 04 17:32:53 crc kubenswrapper[4631]: I1204 17:32:53.804447 4631 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cca995e5-9ce2-4996-b131-60c489ff721e" Dec 04 17:32:54 crc kubenswrapper[4631]: I1204 17:32:54.811823 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 04 17:32:54 crc kubenswrapper[4631]: I1204 17:32:54.812313 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ada1a3525cfb4ea66ee6fcc6a81fe2318b3cfdb5322a1553b41223f5c7a0b02e"} Dec 04 17:32:56 crc kubenswrapper[4631]: I1204 17:32:56.262064 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:32:56 crc kubenswrapper[4631]: I1204 17:32:56.262757 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:32:56 crc kubenswrapper[4631]: I1204 17:32:56.268754 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:32:56 crc kubenswrapper[4631]: I1204 17:32:56.462918 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 04 17:32:58 crc kubenswrapper[4631]: I1204 17:32:58.017249 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 17:32:58 crc kubenswrapper[4631]: I1204 17:32:58.023727 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 17:32:58 crc kubenswrapper[4631]: I1204 17:32:58.822589 4631 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:32:58 crc kubenswrapper[4631]: I1204 17:32:58.835425 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 17:32:59 crc kubenswrapper[4631]: I1204 17:32:59.840188 4631 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cca995e5-9ce2-4996-b131-60c489ff721e" Dec 04 17:32:59 crc kubenswrapper[4631]: I1204 17:32:59.840219 4631 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cca995e5-9ce2-4996-b131-60c489ff721e" Dec 04 17:32:59 crc kubenswrapper[4631]: I1204 17:32:59.846908 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:33:00 crc kubenswrapper[4631]: I1204 17:33:00.258889 4631 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-l85pg" Dec 04 17:33:00 crc kubenswrapper[4631]: I1204 17:33:00.260951 4631 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="b349c4c3-c8c6-453a-86fe-c4e60af5cd5e" Dec 04 17:33:00 crc kubenswrapper[4631]: I1204 17:33:00.297437 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-l85pg" Dec 04 17:33:00 crc kubenswrapper[4631]: I1204 17:33:00.846975 4631 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cca995e5-9ce2-4996-b131-60c489ff721e" Dec 04 17:33:00 crc kubenswrapper[4631]: I1204 17:33:00.847006 4631 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="cca995e5-9ce2-4996-b131-60c489ff721e" Dec 04 17:33:00 crc kubenswrapper[4631]: I1204 17:33:00.850043 4631 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="b349c4c3-c8c6-453a-86fe-c4e60af5cd5e" Dec 04 17:33:01 crc kubenswrapper[4631]: I1204 17:33:01.239846 4631 scope.go:117] "RemoveContainer" containerID="b1756f83cc992c6777526ef7ef6506e16324e66f6328138510b8937bac4e46a4" Dec 04 17:33:01 crc kubenswrapper[4631]: I1204 17:33:01.855254 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-cc7989dc6-mvfhk_0e227500-d232-4c7b-b78e-24f66d88f82a/oauth-openshift/1.log" Dec 04 17:33:01 crc kubenswrapper[4631]: I1204 17:33:01.855821 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" event={"ID":"0e227500-d232-4c7b-b78e-24f66d88f82a","Type":"ContainerStarted","Data":"a8ffddb506b84269d5c0a119c33b34c445a6b55b13d82cbf88032005aef584f6"} Dec 04 17:33:01 crc kubenswrapper[4631]: I1204 17:33:01.857309 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:33:01 crc kubenswrapper[4631]: I1204 17:33:01.857582 4631 patch_prober.go:28] interesting pod/oauth-openshift-cc7989dc6-mvfhk container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.56:6443/healthz\": dial tcp 10.217.0.56:6443: connect: connection refused" start-of-body= Dec 04 17:33:01 crc kubenswrapper[4631]: I1204 17:33:01.857725 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.56:6443/healthz\": dial tcp 10.217.0.56:6443: connect: connection refused" Dec 04 17:33:02 crc kubenswrapper[4631]: I1204 17:33:02.863534 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-cc7989dc6-mvfhk_0e227500-d232-4c7b-b78e-24f66d88f82a/oauth-openshift/2.log" Dec 04 17:33:02 crc kubenswrapper[4631]: I1204 17:33:02.864304 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-cc7989dc6-mvfhk_0e227500-d232-4c7b-b78e-24f66d88f82a/oauth-openshift/1.log" Dec 04 17:33:02 crc kubenswrapper[4631]: I1204 17:33:02.864353 
4631 generic.go:334] "Generic (PLEG): container finished" podID="0e227500-d232-4c7b-b78e-24f66d88f82a" containerID="a8ffddb506b84269d5c0a119c33b34c445a6b55b13d82cbf88032005aef584f6" exitCode=255 Dec 04 17:33:02 crc kubenswrapper[4631]: I1204 17:33:02.864415 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" event={"ID":"0e227500-d232-4c7b-b78e-24f66d88f82a","Type":"ContainerDied","Data":"a8ffddb506b84269d5c0a119c33b34c445a6b55b13d82cbf88032005aef584f6"} Dec 04 17:33:02 crc kubenswrapper[4631]: I1204 17:33:02.864452 4631 scope.go:117] "RemoveContainer" containerID="b1756f83cc992c6777526ef7ef6506e16324e66f6328138510b8937bac4e46a4" Dec 04 17:33:02 crc kubenswrapper[4631]: I1204 17:33:02.864987 4631 scope.go:117] "RemoveContainer" containerID="a8ffddb506b84269d5c0a119c33b34c445a6b55b13d82cbf88032005aef584f6" Dec 04 17:33:02 crc kubenswrapper[4631]: E1204 17:33:02.865187 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 20s restarting failed container=oauth-openshift pod=oauth-openshift-cc7989dc6-mvfhk_openshift-authentication(0e227500-d232-4c7b-b78e-24f66d88f82a)\"" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" Dec 04 17:33:03 crc kubenswrapper[4631]: I1204 17:33:03.870937 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-cc7989dc6-mvfhk_0e227500-d232-4c7b-b78e-24f66d88f82a/oauth-openshift/2.log" Dec 04 17:33:03 crc kubenswrapper[4631]: I1204 17:33:03.871548 4631 scope.go:117] "RemoveContainer" containerID="a8ffddb506b84269d5c0a119c33b34c445a6b55b13d82cbf88032005aef584f6" Dec 04 17:33:03 crc kubenswrapper[4631]: E1204 17:33:03.871854 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 20s restarting failed container=oauth-openshift pod=oauth-openshift-cc7989dc6-mvfhk_openshift-authentication(0e227500-d232-4c7b-b78e-24f66d88f82a)\"" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" Dec 04 17:33:07 crc kubenswrapper[4631]: I1204 17:33:07.311480 4631 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" Dec 04 17:33:07 crc kubenswrapper[4631]: I1204 17:33:07.312508 4631 scope.go:117] "RemoveContainer" containerID="a8ffddb506b84269d5c0a119c33b34c445a6b55b13d82cbf88032005aef584f6" Dec 04 17:33:07 crc kubenswrapper[4631]: E1204 17:33:07.312825 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 20s restarting failed container=oauth-openshift pod=oauth-openshift-cc7989dc6-mvfhk_openshift-authentication(0e227500-d232-4c7b-b78e-24f66d88f82a)\"" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" Dec 04 17:33:08 crc kubenswrapper[4631]: I1204 17:33:08.033104 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 04 17:33:08 crc kubenswrapper[4631]: I1204 17:33:08.990726 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 04 17:33:09 crc kubenswrapper[4631]: I1204 17:33:09.101385 4631 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 04 17:33:10 crc kubenswrapper[4631]: I1204 17:33:10.757589 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 04 17:33:10 crc kubenswrapper[4631]: I1204 17:33:10.887540 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 04 17:33:10 crc kubenswrapper[4631]: I1204 17:33:10.888638 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 04 17:33:11 crc kubenswrapper[4631]: I1204 17:33:11.160664 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 04 17:33:11 crc kubenswrapper[4631]: I1204 17:33:11.262405 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 04 17:33:11 crc kubenswrapper[4631]: I1204 17:33:11.348185 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 04 17:33:11 crc kubenswrapper[4631]: I1204 17:33:11.472069 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 04 17:33:11 crc kubenswrapper[4631]: I1204 17:33:11.688416 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 04 17:33:11 crc kubenswrapper[4631]: I1204 17:33:11.805341 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 04 17:33:11 crc kubenswrapper[4631]: I1204 17:33:11.865971 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 04 17:33:12 crc kubenswrapper[4631]: I1204 17:33:12.048394 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 04 17:33:12 crc kubenswrapper[4631]: I1204 17:33:12.328403 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 04 17:33:12 crc kubenswrapper[4631]: I1204 17:33:12.350944 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 04 17:33:12 crc kubenswrapper[4631]: I1204 17:33:12.475537 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 04 17:33:12 crc kubenswrapper[4631]: I1204 17:33:12.490366 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 04 17:33:12 crc kubenswrapper[4631]: I1204 17:33:12.586514 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 04 17:33:12 crc kubenswrapper[4631]: I1204 17:33:12.613642 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 04 17:33:12 crc kubenswrapper[4631]: I1204 17:33:12.633124 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 04 17:33:12 crc 
kubenswrapper[4631]: I1204 17:33:12.652013 4631 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 04 17:33:12 crc kubenswrapper[4631]: I1204 17:33:12.665124 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 04 17:33:12 crc kubenswrapper[4631]: I1204 17:33:12.692548 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 04 17:33:12 crc kubenswrapper[4631]: I1204 17:33:12.981550 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 04 17:33:13 crc kubenswrapper[4631]: I1204 17:33:13.022089 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 04 17:33:13 crc kubenswrapper[4631]: I1204 17:33:13.288810 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 04 17:33:13 crc kubenswrapper[4631]: I1204 17:33:13.339313 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 04 17:33:13 crc kubenswrapper[4631]: I1204 17:33:13.438001 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 04 17:33:13 crc kubenswrapper[4631]: I1204 17:33:13.506362 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 04 17:33:13 crc kubenswrapper[4631]: I1204 17:33:13.516616 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 04 17:33:13 crc kubenswrapper[4631]: I1204 17:33:13.525696 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 04 17:33:13 crc kubenswrapper[4631]: I1204 17:33:13.553306 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 04 17:33:13 crc kubenswrapper[4631]: I1204 17:33:13.558908 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 04 17:33:13 crc kubenswrapper[4631]: I1204 17:33:13.561948 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 04 17:33:13 crc kubenswrapper[4631]: I1204 17:33:13.607000 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 04 17:33:13 crc kubenswrapper[4631]: I1204 17:33:13.667524 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 04 17:33:13 crc kubenswrapper[4631]: I1204 17:33:13.762239 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 04 17:33:13 crc kubenswrapper[4631]: I1204 17:33:13.774596 4631 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 04 17:33:13 crc kubenswrapper[4631]: I1204 17:33:13.895831 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 04 17:33:13 crc kubenswrapper[4631]: I1204 17:33:13.896881 
4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 04 17:33:13 crc kubenswrapper[4631]: I1204 17:33:13.909640 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 04 17:33:13 crc kubenswrapper[4631]: I1204 17:33:13.946721 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 04 17:33:14 crc kubenswrapper[4631]: I1204 17:33:14.057017 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 04 17:33:14 crc kubenswrapper[4631]: I1204 17:33:14.118988 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 04 17:33:14 crc kubenswrapper[4631]: I1204 17:33:14.248025 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 04 17:33:14 crc kubenswrapper[4631]: I1204 17:33:14.250635 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 04 17:33:14 crc kubenswrapper[4631]: I1204 17:33:14.264068 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 04 17:33:14 crc kubenswrapper[4631]: I1204 17:33:14.315745 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 04 17:33:14 crc kubenswrapper[4631]: I1204 17:33:14.397445 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 04 17:33:14 crc kubenswrapper[4631]: I1204 17:33:14.406120 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 04 17:33:14 crc kubenswrapper[4631]: I1204 17:33:14.406435 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 04 17:33:14 crc kubenswrapper[4631]: I1204 17:33:14.469465 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 04 17:33:14 crc kubenswrapper[4631]: I1204 17:33:14.471640 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 04 17:33:14 crc kubenswrapper[4631]: I1204 17:33:14.546759 4631 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 04 17:33:14 crc kubenswrapper[4631]: I1204 17:33:14.623137 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 04 17:33:14 crc kubenswrapper[4631]: I1204 17:33:14.908762 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 04 17:33:14 crc kubenswrapper[4631]: I1204 17:33:14.925320 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 04 17:33:14 crc kubenswrapper[4631]: I1204 17:33:14.971861 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 04 17:33:14 crc kubenswrapper[4631]: I1204 
17:33:14.973733 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 04 17:33:14 crc kubenswrapper[4631]: I1204 17:33:14.980930 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 04 17:33:14 crc kubenswrapper[4631]: I1204 17:33:14.988201 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 04 17:33:14 crc kubenswrapper[4631]: I1204 17:33:14.993043 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.063583 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.064685 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.067039 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.098021 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.159229 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.167575 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.171737 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.254991 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.270993 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.298539 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.366513 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.377795 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.483105 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.586264 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.615103 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.628605 4631 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.630670 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.642812 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.701180 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.735281 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.762255 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.786142 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.807421 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.819325 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.900824 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 04 17:33:15 crc kubenswrapper[4631]: I1204 17:33:15.999568 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.006602 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.015575 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.030300 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.174233 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.199204 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.224626 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.307696 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.385119 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.408536 4631 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.417579 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.426277 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.473354 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.484991 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.517652 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.603813 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.619263 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.644346 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.660354 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.702032 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.709906 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.740087 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.857636 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.914161 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.933586 4631 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 04 17:33:16 crc kubenswrapper[4631]: I1204 17:33:16.956920 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 04 17:33:17 crc kubenswrapper[4631]: I1204 17:33:17.021583 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 04 17:33:17 crc kubenswrapper[4631]: I1204 17:33:17.052088 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 04 17:33:17 crc kubenswrapper[4631]: I1204 17:33:17.085559 4631 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 04 17:33:17 crc kubenswrapper[4631]: I1204 17:33:17.154010 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 04 17:33:17 crc kubenswrapper[4631]: I1204 17:33:17.458028 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 04 17:33:17 crc kubenswrapper[4631]: I1204 17:33:17.477626 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 04 17:33:17 crc kubenswrapper[4631]: I1204 17:33:17.567327 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 04 17:33:17 crc kubenswrapper[4631]: I1204 17:33:17.610164 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 04 17:33:17 crc kubenswrapper[4631]: I1204 17:33:17.671642 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 04 17:33:17 crc kubenswrapper[4631]: I1204 17:33:17.683407 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 04 17:33:17 crc kubenswrapper[4631]: I1204 17:33:17.705329 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 04 17:33:17 crc kubenswrapper[4631]: I1204 17:33:17.766666 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 04 17:33:17 crc kubenswrapper[4631]: I1204 17:33:17.812223 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 04 17:33:17 crc kubenswrapper[4631]: I1204 17:33:17.827734 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 04 17:33:17 crc kubenswrapper[4631]: I1204 17:33:17.839244 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 04 17:33:17 crc kubenswrapper[4631]: I1204 17:33:17.886865 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 04 17:33:17 crc kubenswrapper[4631]: I1204 17:33:17.931740 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 17:33:18.042225 4631 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 17:33:18.049760 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 17:33:18.115581 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 17:33:18.123029 4631 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-network-operator"/"metrics-tls" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 17:33:18.192826 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 17:33:18.240489 4631 scope.go:117] "RemoveContainer" containerID="a8ffddb506b84269d5c0a119c33b34c445a6b55b13d82cbf88032005aef584f6" Dec 04 17:33:18 crc kubenswrapper[4631]: E1204 17:33:18.240697 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 20s restarting failed container=oauth-openshift pod=oauth-openshift-cc7989dc6-mvfhk_openshift-authentication(0e227500-d232-4c7b-b78e-24f66d88f82a)\"" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" podUID="0e227500-d232-4c7b-b78e-24f66d88f82a" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 17:33:18.245188 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 17:33:18.284953 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 17:33:18.294915 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 17:33:18.332284 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 17:33:18.347639 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 17:33:18.352344 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 17:33:18.399657 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 17:33:18.421280 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 17:33:18.466769 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 17:33:18.502056 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 17:33:18.539720 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 17:33:18.728960 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 17:33:18.782138 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 17:33:18.795262 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 
17:33:18.829026 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 17:33:18.898296 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 04 17:33:18 crc kubenswrapper[4631]: I1204 17:33:18.960692 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 04 17:33:19 crc kubenswrapper[4631]: I1204 17:33:19.047536 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 04 17:33:19 crc kubenswrapper[4631]: I1204 17:33:19.280479 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 04 17:33:19 crc kubenswrapper[4631]: I1204 17:33:19.310947 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 04 17:33:19 crc kubenswrapper[4631]: I1204 17:33:19.426466 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 04 17:33:19 crc kubenswrapper[4631]: I1204 17:33:19.546471 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 04 17:33:19 crc kubenswrapper[4631]: I1204 17:33:19.616326 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 04 17:33:19 crc kubenswrapper[4631]: I1204 17:33:19.639307 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 04 17:33:19 crc kubenswrapper[4631]: I1204 17:33:19.639308 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 04 17:33:19 crc kubenswrapper[4631]: I1204 17:33:19.676408 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 04 17:33:19 crc kubenswrapper[4631]: I1204 17:33:19.698357 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 04 17:33:19 crc kubenswrapper[4631]: I1204 17:33:19.816708 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 04 17:33:19 crc kubenswrapper[4631]: I1204 17:33:19.839744 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 04 17:33:19 crc kubenswrapper[4631]: I1204 17:33:19.904755 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.087509 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.101917 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.110488 4631 reflector.go:368] Caches populated for *v1.Secret 
from object-"openshift-apiserver"/"etcd-client" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.173581 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.194551 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.211706 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.253784 4631 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.258226 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-l85pg" podStartSLOduration=37.873151078 podStartE2EDuration="2m31.258204879s" podCreationTimestamp="2025-12-04 17:30:49 +0000 UTC" firstStartedPulling="2025-12-04 17:30:55.785628122 +0000 UTC m=+185.817870120" lastFinishedPulling="2025-12-04 17:32:49.170681923 +0000 UTC m=+299.202923921" observedRunningTime="2025-12-04 17:32:58.95717383 +0000 UTC m=+308.989415828" watchObservedRunningTime="2025-12-04 17:33:20.258204879 +0000 UTC m=+330.290446877" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.259856 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-marketplace/community-operators-bvzck","openshift-marketplace/redhat-operators-bxvz7"] Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.259945 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.263648 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.272924 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.279807 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=22.279787466 podStartE2EDuration="22.279787466s" podCreationTimestamp="2025-12-04 17:32:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:33:20.276511679 +0000 UTC m=+330.308753697" watchObservedRunningTime="2025-12-04 17:33:20.279787466 +0000 UTC m=+330.312029474" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.283185 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.289064 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.342044 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.380274 4631 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.452525 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.479898 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.491464 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.543766 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.553601 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.602586 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.613305 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.674504 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.744951 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.841615 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 04 17:33:20 crc kubenswrapper[4631]: I1204 17:33:20.955112 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 04 17:33:21 crc kubenswrapper[4631]: I1204 17:33:21.087069 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 04 17:33:21 crc kubenswrapper[4631]: I1204 17:33:21.143965 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 04 17:33:21 crc kubenswrapper[4631]: I1204 17:33:21.194632 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 04 17:33:21 crc kubenswrapper[4631]: I1204 17:33:21.194647 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 04 17:33:21 crc kubenswrapper[4631]: I1204 17:33:21.198309 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 04 17:33:21 crc kubenswrapper[4631]: I1204 17:33:21.253904 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 04 17:33:21 crc kubenswrapper[4631]: I1204 17:33:21.342886 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 04 17:33:21 crc kubenswrapper[4631]: I1204 17:33:21.372105 4631 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 04 17:33:21 crc kubenswrapper[4631]: I1204 17:33:21.383328 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 04 17:33:21 crc kubenswrapper[4631]: I1204 17:33:21.403951 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 04 17:33:21 crc kubenswrapper[4631]: I1204 17:33:21.428971 4631 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 04 17:33:21 crc kubenswrapper[4631]: I1204 17:33:21.429339 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://10eae845693d459790c8476b105982ae870ae89a3d6d7379ac06d36483738b8a" gracePeriod=5 Dec 04 17:33:21 crc kubenswrapper[4631]: I1204 17:33:21.555190 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 04 17:33:21 crc kubenswrapper[4631]: I1204 17:33:21.724264 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 04 17:33:21 crc kubenswrapper[4631]: I1204 17:33:21.832622 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 04 17:33:21 crc kubenswrapper[4631]: I1204 17:33:21.892625 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 04 17:33:21 crc kubenswrapper[4631]: I1204 17:33:21.910745 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 04 17:33:21 crc kubenswrapper[4631]: I1204 17:33:21.977246 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 04 17:33:22 crc kubenswrapper[4631]: I1204 17:33:22.040934 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 04 17:33:22 crc kubenswrapper[4631]: I1204 17:33:22.115457 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 04 17:33:22 crc kubenswrapper[4631]: I1204 17:33:22.148220 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 04 17:33:22 crc kubenswrapper[4631]: I1204 17:33:22.162696 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 04 17:33:22 crc kubenswrapper[4631]: I1204 17:33:22.231863 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 04 17:33:22 crc kubenswrapper[4631]: I1204 17:33:22.233480 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 04 17:33:22 crc kubenswrapper[4631]: I1204 17:33:22.245946 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5622ec39-e11e-44c2-b059-47d6fc091328" path="/var/lib/kubelet/pods/5622ec39-e11e-44c2-b059-47d6fc091328/volumes" Dec 04 17:33:22 crc kubenswrapper[4631]: I1204 17:33:22.247306 
4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" path="/var/lib/kubelet/pods/7c6c775a-ae4c-4682-97fd-5f9e4457f8fd/volumes"
Dec 04 17:33:22 crc kubenswrapper[4631]: I1204 17:33:22.357142 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Dec 04 17:33:22 crc kubenswrapper[4631]: I1204 17:33:22.357142 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Dec 04 17:33:22 crc kubenswrapper[4631]: I1204 17:33:22.477974 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Dec 04 17:33:22 crc kubenswrapper[4631]: I1204 17:33:22.486827 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Dec 04 17:33:22 crc kubenswrapper[4631]: I1204 17:33:22.575322 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Dec 04 17:33:22 crc kubenswrapper[4631]: I1204 17:33:22.580996 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Dec 04 17:33:22 crc kubenswrapper[4631]: I1204 17:33:22.581535 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 04 17:33:22 crc kubenswrapper[4631]: I1204 17:33:22.622299 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Dec 04 17:33:22 crc kubenswrapper[4631]: I1204 17:33:22.762309 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 04 17:33:22 crc kubenswrapper[4631]: I1204 17:33:22.891454 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 04 17:33:22 crc kubenswrapper[4631]: I1204 17:33:22.989176 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Dec 04 17:33:22 crc kubenswrapper[4631]: I1204 17:33:22.992553 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Dec 04 17:33:22 crc kubenswrapper[4631]: I1204 17:33:22.998504 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Dec 04 17:33:23 crc kubenswrapper[4631]: I1204 17:33:23.079970 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Dec 04 17:33:23 crc kubenswrapper[4631]: I1204 17:33:23.233551 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Dec 04 17:33:23 crc kubenswrapper[4631]: I1204 17:33:23.415830 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Dec 04 17:33:23 crc kubenswrapper[4631]: I1204 17:33:23.428741 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Dec 04 17:33:23 crc kubenswrapper[4631]: I1204 17:33:23.438190 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Dec 04 17:33:23 crc kubenswrapper[4631]: I1204 17:33:23.475360 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Dec 04 17:33:23 crc kubenswrapper[4631]: I1204 17:33:23.479915 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Dec 04 17:33:23 crc kubenswrapper[4631]: I1204 17:33:23.496138 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Dec 04 17:33:23 crc kubenswrapper[4631]: I1204 17:33:23.754102 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Dec 04 17:33:23 crc kubenswrapper[4631]: I1204 17:33:23.852589 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Dec 04 17:33:24 crc kubenswrapper[4631]: I1204 17:33:24.140622 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Dec 04 17:33:24 crc kubenswrapper[4631]: I1204 17:33:24.248085 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Dec 04 17:33:24 crc kubenswrapper[4631]: I1204 17:33:24.332218 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Dec 04 17:33:24 crc kubenswrapper[4631]: I1204 17:33:24.515209 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Dec 04 17:33:24 crc kubenswrapper[4631]: I1204 17:33:24.572479 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Dec 04 17:33:24 crc kubenswrapper[4631]: I1204 17:33:24.609582 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Dec 04 17:33:24 crc kubenswrapper[4631]: I1204 17:33:24.713892 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Dec 04 17:33:24 crc kubenswrapper[4631]: I1204 17:33:24.778108 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Dec 04 17:33:24 crc kubenswrapper[4631]: I1204 17:33:24.810555 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Dec 04 17:33:24 crc kubenswrapper[4631]: I1204 17:33:24.932342 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Dec 04 17:33:24 crc kubenswrapper[4631]: I1204 17:33:24.981696 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Dec 04 17:33:25 crc kubenswrapper[4631]: I1204 17:33:25.014264 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 04 17:33:25 crc kubenswrapper[4631]: I1204 17:33:25.310827 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Dec 04 17:33:25 crc kubenswrapper[4631]: I1204 17:33:25.656620 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Dec 04 17:33:25 crc kubenswrapper[4631]: I1204 17:33:25.702499 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 04 17:33:25 crc kubenswrapper[4631]: I1204 17:33:25.862731 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Dec 04 17:33:26 crc kubenswrapper[4631]: I1204 17:33:26.111865 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Dec 04 17:33:26 crc kubenswrapper[4631]: I1204 17:33:26.484103 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Dec 04 17:33:26 crc kubenswrapper[4631]: I1204 17:33:26.766237 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Dec 04 17:33:27 crc kubenswrapper[4631]: I1204 17:33:27.009844 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 04 17:33:27 crc kubenswrapper[4631]: I1204 17:33:27.010328 4631 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="10eae845693d459790c8476b105982ae870ae89a3d6d7379ac06d36483738b8a" exitCode=137
Dec 04 17:33:27 crc kubenswrapper[4631]: I1204 17:33:27.010360 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9465732ad3649d34afc2a9fdfa3ee96d8115ddf5b8c55aa08ce4739e0f5fb18f"
Dec 04 17:33:27 crc kubenswrapper[4631]: I1204 17:33:27.031948 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 04 17:33:27 crc kubenswrapper[4631]: I1204 17:33:27.032060 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 04 17:33:27 crc kubenswrapper[4631]: I1204 17:33:27.207597 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 04 17:33:27 crc kubenswrapper[4631]: I1204 17:33:27.207640 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 04 17:33:27 crc kubenswrapper[4631]: I1204 17:33:27.207677 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 17:33:27 crc kubenswrapper[4631]: I1204 17:33:27.207690 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 04 17:33:27 crc kubenswrapper[4631]: I1204 17:33:27.207719 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 17:33:27 crc kubenswrapper[4631]: I1204 17:33:27.207733 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 04 17:33:27 crc kubenswrapper[4631]: I1204 17:33:27.207763 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 04 17:33:27 crc kubenswrapper[4631]: I1204 17:33:27.207941 4631 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\""
Dec 04 17:33:27 crc kubenswrapper[4631]: I1204 17:33:27.207954 4631 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 04 17:33:27 crc kubenswrapper[4631]: I1204 17:33:27.207980 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 17:33:27 crc kubenswrapper[4631]: I1204 17:33:27.208006 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 17:33:27 crc kubenswrapper[4631]: I1204 17:33:27.219568 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 17:33:27 crc kubenswrapper[4631]: I1204 17:33:27.309520 4631 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 04 17:33:27 crc kubenswrapper[4631]: I1204 17:33:27.309556 4631 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\""
Dec 04 17:33:27 crc kubenswrapper[4631]: I1204 17:33:27.309565 4631 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\""
Dec 04 17:33:28 crc kubenswrapper[4631]: I1204 17:33:28.017432 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 04 17:33:28 crc kubenswrapper[4631]: I1204 17:33:28.248236 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes"
Dec 04 17:33:29 crc kubenswrapper[4631]: I1204 17:33:29.239920 4631 scope.go:117] "RemoveContainer" containerID="a8ffddb506b84269d5c0a119c33b34c445a6b55b13d82cbf88032005aef584f6"
Dec 04 17:33:30 crc kubenswrapper[4631]: I1204 17:33:30.038118 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-cc7989dc6-mvfhk_0e227500-d232-4c7b-b78e-24f66d88f82a/oauth-openshift/2.log"
Dec 04 17:33:30 crc kubenswrapper[4631]: I1204 17:33:30.038598 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" event={"ID":"0e227500-d232-4c7b-b78e-24f66d88f82a","Type":"ContainerStarted","Data":"132729a84ddcec0d93912aa03337259fcc914a5fe035fc7940986db5e6e05ea5"}
Dec 04 17:33:30 crc kubenswrapper[4631]: I1204 17:33:30.038878 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk"
Dec 04 17:33:30 crc kubenswrapper[4631]: I1204 17:33:30.062957 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk" podStartSLOduration=82.062941212 podStartE2EDuration="1m22.062941212s" podCreationTimestamp="2025-12-04 17:32:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:33:01.878609522 +0000 UTC m=+311.910851520" watchObservedRunningTime="2025-12-04 17:33:30.062941212 +0000 UTC m=+340.095183210"
Dec 04 17:33:30 crc kubenswrapper[4631]: I1204 17:33:30.148623 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-cc7989dc6-mvfhk"
Dec 04 17:33:57 crc kubenswrapper[4631]: I1204 17:33:57.691809 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5vvr2"]
Dec 04 17:33:57 crc kubenswrapper[4631]: I1204 17:33:57.693002 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2" podUID="85d00b11-ed99-44ac-81b8-73d958bc4d3e" containerName="controller-manager" containerID="cri-o://8c86a2ea23af5a07be57b351f6333efb7ed8252c7f01538d8428e7e9150d3697" gracePeriod=30
Dec 04 17:33:57 crc kubenswrapper[4631]: I1204 17:33:57.803556 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr"]
Dec 04 17:33:57 crc kubenswrapper[4631]: I1204 17:33:57.803774 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr" podUID="f6f09a1b-f9c2-43ec-8222-1fa25a379095" containerName="route-controller-manager" containerID="cri-o://e124d0d4346b689a96e88cae51fae98d342905979533b5dadd243e8a27933019" gracePeriod=30
Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.147886 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2"
Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.215034 4631 generic.go:334] "Generic (PLEG): container finished" podID="85d00b11-ed99-44ac-81b8-73d958bc4d3e" containerID="8c86a2ea23af5a07be57b351f6333efb7ed8252c7f01538d8428e7e9150d3697" exitCode=0
Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.215086 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2" event={"ID":"85d00b11-ed99-44ac-81b8-73d958bc4d3e","Type":"ContainerDied","Data":"8c86a2ea23af5a07be57b351f6333efb7ed8252c7f01538d8428e7e9150d3697"}
Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.215114 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2" event={"ID":"85d00b11-ed99-44ac-81b8-73d958bc4d3e","Type":"ContainerDied","Data":"c69423100875f5d43cc9ba2e43ed29f7919ce1026c8e326060835f9f0dda6e98"}
Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.215130 4631 scope.go:117] "RemoveContainer" containerID="8c86a2ea23af5a07be57b351f6333efb7ed8252c7f01538d8428e7e9150d3697"
Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.215224 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-5vvr2"
Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.223552 4631 generic.go:334] "Generic (PLEG): container finished" podID="f6f09a1b-f9c2-43ec-8222-1fa25a379095" containerID="e124d0d4346b689a96e88cae51fae98d342905979533b5dadd243e8a27933019" exitCode=0
Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.223592 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr" event={"ID":"f6f09a1b-f9c2-43ec-8222-1fa25a379095","Type":"ContainerDied","Data":"e124d0d4346b689a96e88cae51fae98d342905979533b5dadd243e8a27933019"}
Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.249231 4631 scope.go:117] "RemoveContainer" containerID="8c86a2ea23af5a07be57b351f6333efb7ed8252c7f01538d8428e7e9150d3697"
Dec 04 17:33:58 crc kubenswrapper[4631]: E1204 17:33:58.249550 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c86a2ea23af5a07be57b351f6333efb7ed8252c7f01538d8428e7e9150d3697\": container with ID starting with 8c86a2ea23af5a07be57b351f6333efb7ed8252c7f01538d8428e7e9150d3697 not found: ID does not exist" containerID="8c86a2ea23af5a07be57b351f6333efb7ed8252c7f01538d8428e7e9150d3697"
Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.249579 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c86a2ea23af5a07be57b351f6333efb7ed8252c7f01538d8428e7e9150d3697"} err="failed to get container status \"8c86a2ea23af5a07be57b351f6333efb7ed8252c7f01538d8428e7e9150d3697\": rpc error: code = NotFound desc = could not find container \"8c86a2ea23af5a07be57b351f6333efb7ed8252c7f01538d8428e7e9150d3697\": container with ID starting with 8c86a2ea23af5a07be57b351f6333efb7ed8252c7f01538d8428e7e9150d3697 not found: ID does not exist"
Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.300299 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr"
Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.343956 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njcf7\" (UniqueName: \"kubernetes.io/projected/85d00b11-ed99-44ac-81b8-73d958bc4d3e-kube-api-access-njcf7\") pod \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\" (UID: \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\") "
Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.344022 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85d00b11-ed99-44ac-81b8-73d958bc4d3e-config\") pod \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\" (UID: \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\") "
Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.344077 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/85d00b11-ed99-44ac-81b8-73d958bc4d3e-client-ca\") pod \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\" (UID: \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\") "
Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.344097 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85d00b11-ed99-44ac-81b8-73d958bc4d3e-serving-cert\") pod \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\" (UID: \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\") "
Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.344120 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/85d00b11-ed99-44ac-81b8-73d958bc4d3e-proxy-ca-bundles\") pod \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\" (UID: \"85d00b11-ed99-44ac-81b8-73d958bc4d3e\") "
Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.345471 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85d00b11-ed99-44ac-81b8-73d958bc4d3e-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "85d00b11-ed99-44ac-81b8-73d958bc4d3e" (UID: "85d00b11-ed99-44ac-81b8-73d958bc4d3e"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.345503 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85d00b11-ed99-44ac-81b8-73d958bc4d3e-config" (OuterVolumeSpecName: "config") pod "85d00b11-ed99-44ac-81b8-73d958bc4d3e" (UID: "85d00b11-ed99-44ac-81b8-73d958bc4d3e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.346106 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85d00b11-ed99-44ac-81b8-73d958bc4d3e-client-ca" (OuterVolumeSpecName: "client-ca") pod "85d00b11-ed99-44ac-81b8-73d958bc4d3e" (UID: "85d00b11-ed99-44ac-81b8-73d958bc4d3e"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.353076 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85d00b11-ed99-44ac-81b8-73d958bc4d3e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "85d00b11-ed99-44ac-81b8-73d958bc4d3e" (UID: "85d00b11-ed99-44ac-81b8-73d958bc4d3e"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.353996 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85d00b11-ed99-44ac-81b8-73d958bc4d3e-kube-api-access-njcf7" (OuterVolumeSpecName: "kube-api-access-njcf7") pod "85d00b11-ed99-44ac-81b8-73d958bc4d3e" (UID: "85d00b11-ed99-44ac-81b8-73d958bc4d3e"). InnerVolumeSpecName "kube-api-access-njcf7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.445700 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6f09a1b-f9c2-43ec-8222-1fa25a379095-serving-cert\") pod \"f6f09a1b-f9c2-43ec-8222-1fa25a379095\" (UID: \"f6f09a1b-f9c2-43ec-8222-1fa25a379095\") " Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.445829 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6f09a1b-f9c2-43ec-8222-1fa25a379095-config\") pod \"f6f09a1b-f9c2-43ec-8222-1fa25a379095\" (UID: \"f6f09a1b-f9c2-43ec-8222-1fa25a379095\") " Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.445866 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f6f09a1b-f9c2-43ec-8222-1fa25a379095-client-ca\") pod \"f6f09a1b-f9c2-43ec-8222-1fa25a379095\" (UID: \"f6f09a1b-f9c2-43ec-8222-1fa25a379095\") " Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.445951 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b65mk\" (UniqueName: \"kubernetes.io/projected/f6f09a1b-f9c2-43ec-8222-1fa25a379095-kube-api-access-b65mk\") pod \"f6f09a1b-f9c2-43ec-8222-1fa25a379095\" (UID: \"f6f09a1b-f9c2-43ec-8222-1fa25a379095\") " Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.446274 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85d00b11-ed99-44ac-81b8-73d958bc4d3e-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.446298 4631 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/85d00b11-ed99-44ac-81b8-73d958bc4d3e-client-ca\") on node \"crc\" DevicePath \"\"" Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.446309 4631 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85d00b11-ed99-44ac-81b8-73d958bc4d3e-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.446320 4631 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/85d00b11-ed99-44ac-81b8-73d958bc4d3e-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.446333 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njcf7\" (UniqueName: \"kubernetes.io/projected/85d00b11-ed99-44ac-81b8-73d958bc4d3e-kube-api-access-njcf7\") on node \"crc\" DevicePath \"\"" Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.447307 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6f09a1b-f9c2-43ec-8222-1fa25a379095-client-ca" (OuterVolumeSpecName: "client-ca") pod "f6f09a1b-f9c2-43ec-8222-1fa25a379095" (UID: 
"f6f09a1b-f9c2-43ec-8222-1fa25a379095"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.447320 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6f09a1b-f9c2-43ec-8222-1fa25a379095-config" (OuterVolumeSpecName: "config") pod "f6f09a1b-f9c2-43ec-8222-1fa25a379095" (UID: "f6f09a1b-f9c2-43ec-8222-1fa25a379095"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.453018 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6f09a1b-f9c2-43ec-8222-1fa25a379095-kube-api-access-b65mk" (OuterVolumeSpecName: "kube-api-access-b65mk") pod "f6f09a1b-f9c2-43ec-8222-1fa25a379095" (UID: "f6f09a1b-f9c2-43ec-8222-1fa25a379095"). InnerVolumeSpecName "kube-api-access-b65mk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.453103 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6f09a1b-f9c2-43ec-8222-1fa25a379095-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f6f09a1b-f9c2-43ec-8222-1fa25a379095" (UID: "f6f09a1b-f9c2-43ec-8222-1fa25a379095"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.545807 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5vvr2"] Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.547526 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b65mk\" (UniqueName: \"kubernetes.io/projected/f6f09a1b-f9c2-43ec-8222-1fa25a379095-kube-api-access-b65mk\") on node \"crc\" DevicePath \"\"" Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.547573 4631 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6f09a1b-f9c2-43ec-8222-1fa25a379095-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.547587 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6f09a1b-f9c2-43ec-8222-1fa25a379095-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.547600 4631 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f6f09a1b-f9c2-43ec-8222-1fa25a379095-client-ca\") on node \"crc\" DevicePath \"\"" Dec 04 17:33:58 crc kubenswrapper[4631]: I1204 17:33:58.557702 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5vvr2"] Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.233411 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr" event={"ID":"f6f09a1b-f9c2-43ec-8222-1fa25a379095","Type":"ContainerDied","Data":"6194b319986ca79d53141e6b261f20da5c0a23c09056bb757d7051598c22c6cd"} Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.233461 4631 scope.go:117] "RemoveContainer" containerID="e124d0d4346b689a96e88cae51fae98d342905979533b5dadd243e8a27933019" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.233541 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.273292 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr"] Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.277001 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hdlxr"] Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.713193 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-d8dccc797-jj2ds"] Dec 04 17:33:59 crc kubenswrapper[4631]: E1204 17:33:59.714274 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85d00b11-ed99-44ac-81b8-73d958bc4d3e" containerName="controller-manager" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.714358 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="85d00b11-ed99-44ac-81b8-73d958bc4d3e" containerName="controller-manager" Dec 04 17:33:59 crc kubenswrapper[4631]: E1204 17:33:59.714439 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" containerName="extract-utilities" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.714493 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" containerName="extract-utilities" Dec 04 17:33:59 crc kubenswrapper[4631]: E1204 17:33:59.714549 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" containerName="registry-server" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.714601 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" containerName="registry-server" Dec 04 17:33:59 crc kubenswrapper[4631]: E1204 17:33:59.714663 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6f09a1b-f9c2-43ec-8222-1fa25a379095" containerName="route-controller-manager" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.714731 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6f09a1b-f9c2-43ec-8222-1fa25a379095" containerName="route-controller-manager" Dec 04 17:33:59 crc kubenswrapper[4631]: E1204 17:33:59.714798 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" containerName="extract-content" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.714855 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" containerName="extract-content" Dec 04 17:33:59 crc kubenswrapper[4631]: E1204 17:33:59.714913 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.714971 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 04 17:33:59 crc kubenswrapper[4631]: E1204 17:33:59.715031 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fc9fdca-bb86-41d3-a800-2996fdeea0fd" containerName="installer" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.715089 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fc9fdca-bb86-41d3-a800-2996fdeea0fd" containerName="installer" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.715256 4631 
memory_manager.go:354] "RemoveStaleState removing state" podUID="85d00b11-ed99-44ac-81b8-73d958bc4d3e" containerName="controller-manager" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.715331 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c6c775a-ae4c-4682-97fd-5f9e4457f8fd" containerName="registry-server" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.715426 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.715494 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6f09a1b-f9c2-43ec-8222-1fa25a379095" containerName="route-controller-manager" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.715553 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fc9fdca-bb86-41d3-a800-2996fdeea0fd" containerName="installer" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.716021 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.716297 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29"] Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.717053 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.722038 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.722043 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.722049 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.723620 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.723672 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.725204 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.725405 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.725503 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.725651 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.726401 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.726457 4631 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.726727 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.751733 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-d8dccc797-jj2ds"] Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.751802 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.764662 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29"] Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.864210 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98jnv\" (UniqueName: \"kubernetes.io/projected/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-kube-api-access-98jnv\") pod \"controller-manager-d8dccc797-jj2ds\" (UID: \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\") " pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.864263 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/75060739-d159-4122-9664-852aebba7825-client-ca\") pod \"route-controller-manager-7c8bb87477-9th29\" (UID: \"75060739-d159-4122-9664-852aebba7825\") " pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.864283 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-proxy-ca-bundles\") pod \"controller-manager-d8dccc797-jj2ds\" (UID: \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\") " pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.864303 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-serving-cert\") pod \"controller-manager-d8dccc797-jj2ds\" (UID: \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\") " pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.864334 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75060739-d159-4122-9664-852aebba7825-serving-cert\") pod \"route-controller-manager-7c8bb87477-9th29\" (UID: \"75060739-d159-4122-9664-852aebba7825\") " pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.864442 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-client-ca\") pod \"controller-manager-d8dccc797-jj2ds\" (UID: \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\") " pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" Dec 04 17:33:59 crc 
kubenswrapper[4631]: I1204 17:33:59.864464 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75060739-d159-4122-9664-852aebba7825-config\") pod \"route-controller-manager-7c8bb87477-9th29\" (UID: \"75060739-d159-4122-9664-852aebba7825\") " pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.864489 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-config\") pod \"controller-manager-d8dccc797-jj2ds\" (UID: \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\") " pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.864512 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5k56f\" (UniqueName: \"kubernetes.io/projected/75060739-d159-4122-9664-852aebba7825-kube-api-access-5k56f\") pod \"route-controller-manager-7c8bb87477-9th29\" (UID: \"75060739-d159-4122-9664-852aebba7825\") " pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.965595 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5k56f\" (UniqueName: \"kubernetes.io/projected/75060739-d159-4122-9664-852aebba7825-kube-api-access-5k56f\") pod \"route-controller-manager-7c8bb87477-9th29\" (UID: \"75060739-d159-4122-9664-852aebba7825\") " pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.965982 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98jnv\" (UniqueName: \"kubernetes.io/projected/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-kube-api-access-98jnv\") pod \"controller-manager-d8dccc797-jj2ds\" (UID: \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\") " pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.966096 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/75060739-d159-4122-9664-852aebba7825-client-ca\") pod \"route-controller-manager-7c8bb87477-9th29\" (UID: \"75060739-d159-4122-9664-852aebba7825\") " pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.966168 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-proxy-ca-bundles\") pod \"controller-manager-d8dccc797-jj2ds\" (UID: \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\") " pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.966251 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-serving-cert\") pod \"controller-manager-d8dccc797-jj2ds\" (UID: \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\") " pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.966342 4631 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75060739-d159-4122-9664-852aebba7825-serving-cert\") pod \"route-controller-manager-7c8bb87477-9th29\" (UID: \"75060739-d159-4122-9664-852aebba7825\") " pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.966454 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-client-ca\") pod \"controller-manager-d8dccc797-jj2ds\" (UID: \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\") " pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.966558 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75060739-d159-4122-9664-852aebba7825-config\") pod \"route-controller-manager-7c8bb87477-9th29\" (UID: \"75060739-d159-4122-9664-852aebba7825\") " pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.966644 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-config\") pod \"controller-manager-d8dccc797-jj2ds\" (UID: \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\") " pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.967981 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-config\") pod \"controller-manager-d8dccc797-jj2ds\" (UID: \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\") " pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.969488 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-client-ca\") pod \"controller-manager-d8dccc797-jj2ds\" (UID: \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\") " pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.969682 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75060739-d159-4122-9664-852aebba7825-config\") pod \"route-controller-manager-7c8bb87477-9th29\" (UID: \"75060739-d159-4122-9664-852aebba7825\") " pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.969868 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-proxy-ca-bundles\") pod \"controller-manager-d8dccc797-jj2ds\" (UID: \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\") " pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.970138 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/75060739-d159-4122-9664-852aebba7825-client-ca\") pod \"route-controller-manager-7c8bb87477-9th29\" (UID: 
\"75060739-d159-4122-9664-852aebba7825\") " pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.975206 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75060739-d159-4122-9664-852aebba7825-serving-cert\") pod \"route-controller-manager-7c8bb87477-9th29\" (UID: \"75060739-d159-4122-9664-852aebba7825\") " pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.978126 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-serving-cert\") pod \"controller-manager-d8dccc797-jj2ds\" (UID: \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\") " pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" Dec 04 17:33:59 crc kubenswrapper[4631]: I1204 17:33:59.989546 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5k56f\" (UniqueName: \"kubernetes.io/projected/75060739-d159-4122-9664-852aebba7825-kube-api-access-5k56f\") pod \"route-controller-manager-7c8bb87477-9th29\" (UID: \"75060739-d159-4122-9664-852aebba7825\") " pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" Dec 04 17:34:00 crc kubenswrapper[4631]: I1204 17:34:00.001489 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98jnv\" (UniqueName: \"kubernetes.io/projected/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-kube-api-access-98jnv\") pod \"controller-manager-d8dccc797-jj2ds\" (UID: \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\") " pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" Dec 04 17:34:00 crc kubenswrapper[4631]: I1204 17:34:00.034218 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" Dec 04 17:34:00 crc kubenswrapper[4631]: I1204 17:34:00.041062 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" Dec 04 17:34:00 crc kubenswrapper[4631]: I1204 17:34:00.245207 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85d00b11-ed99-44ac-81b8-73d958bc4d3e" path="/var/lib/kubelet/pods/85d00b11-ed99-44ac-81b8-73d958bc4d3e/volumes" Dec 04 17:34:00 crc kubenswrapper[4631]: I1204 17:34:00.246721 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6f09a1b-f9c2-43ec-8222-1fa25a379095" path="/var/lib/kubelet/pods/f6f09a1b-f9c2-43ec-8222-1fa25a379095/volumes" Dec 04 17:34:00 crc kubenswrapper[4631]: I1204 17:34:00.353153 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-d8dccc797-jj2ds"] Dec 04 17:34:00 crc kubenswrapper[4631]: I1204 17:34:00.501477 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29"] Dec 04 17:34:00 crc kubenswrapper[4631]: W1204 17:34:00.514880 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod75060739_d159_4122_9664_852aebba7825.slice/crio-28a1dbe862c9e833a3f7ccd9ded3cdc56b99393952a7bacfa84e7e78ecfe9c20 WatchSource:0}: Error finding container 28a1dbe862c9e833a3f7ccd9ded3cdc56b99393952a7bacfa84e7e78ecfe9c20: Status 404 returned error can't find the container with id 28a1dbe862c9e833a3f7ccd9ded3cdc56b99393952a7bacfa84e7e78ecfe9c20 Dec 04 17:34:01 crc kubenswrapper[4631]: I1204 17:34:01.249976 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" event={"ID":"75060739-d159-4122-9664-852aebba7825","Type":"ContainerStarted","Data":"28a1dbe862c9e833a3f7ccd9ded3cdc56b99393952a7bacfa84e7e78ecfe9c20"} Dec 04 17:34:01 crc kubenswrapper[4631]: I1204 17:34:01.251639 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" event={"ID":"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c","Type":"ContainerStarted","Data":"87436a2ce54ed7cc4a42b9f1124be5be176cc91b866a71a01b6b765bd18be06f"} Dec 04 17:34:01 crc kubenswrapper[4631]: I1204 17:34:01.251675 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" event={"ID":"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c","Type":"ContainerStarted","Data":"1c16156d24cbd55cd83dc8e129c6c9e2bd56c96f5508327c2d06c0b5fe938997"} Dec 04 17:34:02 crc kubenswrapper[4631]: I1204 17:34:02.257981 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" event={"ID":"75060739-d159-4122-9664-852aebba7825","Type":"ContainerStarted","Data":"3e4a2b4977e6a1d583d17f4da280b906762730b9b24676466cc463903c6adca2"} Dec 04 17:34:02 crc kubenswrapper[4631]: I1204 17:34:02.259113 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" Dec 04 17:34:02 crc kubenswrapper[4631]: I1204 17:34:02.265957 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" Dec 04 17:34:02 crc kubenswrapper[4631]: I1204 17:34:02.277707 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" 
podStartSLOduration=4.277688138 podStartE2EDuration="4.277688138s" podCreationTimestamp="2025-12-04 17:33:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:34:02.276701379 +0000 UTC m=+372.308943397" watchObservedRunningTime="2025-12-04 17:34:02.277688138 +0000 UTC m=+372.309930136" Dec 04 17:34:02 crc kubenswrapper[4631]: I1204 17:34:02.294676 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" podStartSLOduration=4.294656159 podStartE2EDuration="4.294656159s" podCreationTimestamp="2025-12-04 17:33:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:34:02.293284299 +0000 UTC m=+372.325526317" watchObservedRunningTime="2025-12-04 17:34:02.294656159 +0000 UTC m=+372.326898157" Dec 04 17:34:03 crc kubenswrapper[4631]: I1204 17:34:03.265036 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" Dec 04 17:34:03 crc kubenswrapper[4631]: I1204 17:34:03.271087 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" Dec 04 17:34:06 crc kubenswrapper[4631]: I1204 17:34:06.023495 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 17:34:06 crc kubenswrapper[4631]: I1204 17:34:06.023575 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 17:34:17 crc kubenswrapper[4631]: I1204 17:34:17.696157 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-d8dccc797-jj2ds"] Dec 04 17:34:17 crc kubenswrapper[4631]: I1204 17:34:17.697330 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" podUID="869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c" containerName="controller-manager" containerID="cri-o://87436a2ce54ed7cc4a42b9f1124be5be176cc91b866a71a01b6b765bd18be06f" gracePeriod=30 Dec 04 17:34:17 crc kubenswrapper[4631]: I1204 17:34:17.730029 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29"] Dec 04 17:34:17 crc kubenswrapper[4631]: I1204 17:34:17.730251 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" podUID="75060739-d159-4122-9664-852aebba7825" containerName="route-controller-manager" containerID="cri-o://3e4a2b4977e6a1d583d17f4da280b906762730b9b24676466cc463903c6adca2" gracePeriod=30 Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.231815 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.237318 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.338905 4631 generic.go:334] "Generic (PLEG): container finished" podID="75060739-d159-4122-9664-852aebba7825" containerID="3e4a2b4977e6a1d583d17f4da280b906762730b9b24676466cc463903c6adca2" exitCode=0 Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.338975 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.339015 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" event={"ID":"75060739-d159-4122-9664-852aebba7825","Type":"ContainerDied","Data":"3e4a2b4977e6a1d583d17f4da280b906762730b9b24676466cc463903c6adca2"} Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.339079 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29" event={"ID":"75060739-d159-4122-9664-852aebba7825","Type":"ContainerDied","Data":"28a1dbe862c9e833a3f7ccd9ded3cdc56b99393952a7bacfa84e7e78ecfe9c20"} Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.339125 4631 scope.go:117] "RemoveContainer" containerID="3e4a2b4977e6a1d583d17f4da280b906762730b9b24676466cc463903c6adca2" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.343502 4631 generic.go:334] "Generic (PLEG): container finished" podID="869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c" containerID="87436a2ce54ed7cc4a42b9f1124be5be176cc91b866a71a01b6b765bd18be06f" exitCode=0 Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.343536 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" event={"ID":"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c","Type":"ContainerDied","Data":"87436a2ce54ed7cc4a42b9f1124be5be176cc91b866a71a01b6b765bd18be06f"} Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.343557 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" event={"ID":"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c","Type":"ContainerDied","Data":"1c16156d24cbd55cd83dc8e129c6c9e2bd56c96f5508327c2d06c0b5fe938997"} Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.343605 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-d8dccc797-jj2ds" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.359007 4631 scope.go:117] "RemoveContainer" containerID="3e4a2b4977e6a1d583d17f4da280b906762730b9b24676466cc463903c6adca2" Dec 04 17:34:18 crc kubenswrapper[4631]: E1204 17:34:18.359477 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e4a2b4977e6a1d583d17f4da280b906762730b9b24676466cc463903c6adca2\": container with ID starting with 3e4a2b4977e6a1d583d17f4da280b906762730b9b24676466cc463903c6adca2 not found: ID does not exist" containerID="3e4a2b4977e6a1d583d17f4da280b906762730b9b24676466cc463903c6adca2" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.359507 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e4a2b4977e6a1d583d17f4da280b906762730b9b24676466cc463903c6adca2"} err="failed to get container status \"3e4a2b4977e6a1d583d17f4da280b906762730b9b24676466cc463903c6adca2\": rpc error: code = NotFound desc = could not find container \"3e4a2b4977e6a1d583d17f4da280b906762730b9b24676466cc463903c6adca2\": container with ID starting with 3e4a2b4977e6a1d583d17f4da280b906762730b9b24676466cc463903c6adca2 not found: ID does not exist" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.359525 4631 scope.go:117] "RemoveContainer" containerID="87436a2ce54ed7cc4a42b9f1124be5be176cc91b866a71a01b6b765bd18be06f" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.371541 4631 scope.go:117] "RemoveContainer" containerID="87436a2ce54ed7cc4a42b9f1124be5be176cc91b866a71a01b6b765bd18be06f" Dec 04 17:34:18 crc kubenswrapper[4631]: E1204 17:34:18.372098 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87436a2ce54ed7cc4a42b9f1124be5be176cc91b866a71a01b6b765bd18be06f\": container with ID starting with 87436a2ce54ed7cc4a42b9f1124be5be176cc91b866a71a01b6b765bd18be06f not found: ID does not exist" containerID="87436a2ce54ed7cc4a42b9f1124be5be176cc91b866a71a01b6b765bd18be06f" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.372151 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87436a2ce54ed7cc4a42b9f1124be5be176cc91b866a71a01b6b765bd18be06f"} err="failed to get container status \"87436a2ce54ed7cc4a42b9f1124be5be176cc91b866a71a01b6b765bd18be06f\": rpc error: code = NotFound desc = could not find container \"87436a2ce54ed7cc4a42b9f1124be5be176cc91b866a71a01b6b765bd18be06f\": container with ID starting with 87436a2ce54ed7cc4a42b9f1124be5be176cc91b866a71a01b6b765bd18be06f not found: ID does not exist" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.400801 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5k56f\" (UniqueName: \"kubernetes.io/projected/75060739-d159-4122-9664-852aebba7825-kube-api-access-5k56f\") pod \"75060739-d159-4122-9664-852aebba7825\" (UID: \"75060739-d159-4122-9664-852aebba7825\") " Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.400901 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-serving-cert\") pod \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\" (UID: \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\") " Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.400980 4631 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-client-ca\") pod \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\" (UID: \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\") " Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.401056 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-proxy-ca-bundles\") pod \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\" (UID: \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\") " Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.401096 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-config\") pod \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\" (UID: \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\") " Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.401245 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75060739-d159-4122-9664-852aebba7825-config\") pod \"75060739-d159-4122-9664-852aebba7825\" (UID: \"75060739-d159-4122-9664-852aebba7825\") " Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.401288 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98jnv\" (UniqueName: \"kubernetes.io/projected/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-kube-api-access-98jnv\") pod \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\" (UID: \"869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c\") " Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.401315 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75060739-d159-4122-9664-852aebba7825-serving-cert\") pod \"75060739-d159-4122-9664-852aebba7825\" (UID: \"75060739-d159-4122-9664-852aebba7825\") " Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.401352 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/75060739-d159-4122-9664-852aebba7825-client-ca\") pod \"75060739-d159-4122-9664-852aebba7825\" (UID: \"75060739-d159-4122-9664-852aebba7825\") " Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.402596 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c" (UID: "869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.402632 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-config" (OuterVolumeSpecName: "config") pod "869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c" (UID: "869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.402846 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.403022 4631 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.403116 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75060739-d159-4122-9664-852aebba7825-client-ca" (OuterVolumeSpecName: "client-ca") pod "75060739-d159-4122-9664-852aebba7825" (UID: "75060739-d159-4122-9664-852aebba7825"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.403131 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75060739-d159-4122-9664-852aebba7825-config" (OuterVolumeSpecName: "config") pod "75060739-d159-4122-9664-852aebba7825" (UID: "75060739-d159-4122-9664-852aebba7825"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.405065 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-client-ca" (OuterVolumeSpecName: "client-ca") pod "869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c" (UID: "869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.408547 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75060739-d159-4122-9664-852aebba7825-kube-api-access-5k56f" (OuterVolumeSpecName: "kube-api-access-5k56f") pod "75060739-d159-4122-9664-852aebba7825" (UID: "75060739-d159-4122-9664-852aebba7825"). InnerVolumeSpecName "kube-api-access-5k56f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.408593 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-kube-api-access-98jnv" (OuterVolumeSpecName: "kube-api-access-98jnv") pod "869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c" (UID: "869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c"). InnerVolumeSpecName "kube-api-access-98jnv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.409143 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c" (UID: "869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.410462 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75060739-d159-4122-9664-852aebba7825-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "75060739-d159-4122-9664-852aebba7825" (UID: "75060739-d159-4122-9664-852aebba7825"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.503588 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75060739-d159-4122-9664-852aebba7825-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.503627 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98jnv\" (UniqueName: \"kubernetes.io/projected/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-kube-api-access-98jnv\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.503638 4631 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75060739-d159-4122-9664-852aebba7825-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.503648 4631 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/75060739-d159-4122-9664-852aebba7825-client-ca\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.503657 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5k56f\" (UniqueName: \"kubernetes.io/projected/75060739-d159-4122-9664-852aebba7825-kube-api-access-5k56f\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.503665 4631 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.503673 4631 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c-client-ca\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.672652 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29"] Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.680207 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7c8bb87477-9th29"] Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.684435 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-d8dccc797-jj2ds"] Dec 04 17:34:18 crc kubenswrapper[4631]: I1204 17:34:18.689329 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-d8dccc797-jj2ds"] Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.726006 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5b98d8c596-zwnw4"] Dec 04 17:34:19 crc kubenswrapper[4631]: E1204 17:34:19.726320 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75060739-d159-4122-9664-852aebba7825" containerName="route-controller-manager" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.726338 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="75060739-d159-4122-9664-852aebba7825" containerName="route-controller-manager" Dec 04 17:34:19 crc kubenswrapper[4631]: E1204 17:34:19.726354 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c" containerName="controller-manager" Dec 04 17:34:19 crc 
kubenswrapper[4631]: I1204 17:34:19.726363 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c" containerName="controller-manager" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.726484 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="75060739-d159-4122-9664-852aebba7825" containerName="route-controller-manager" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.726506 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c" containerName="controller-manager" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.727188 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5b98d8c596-zwnw4" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.730046 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.730049 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.730456 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.731225 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.732113 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77cb67458c-9lqdg"] Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.732611 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.732958 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-77cb67458c-9lqdg" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.734458 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.734840 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.735502 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.736943 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.737010 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.737076 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.738417 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.744830 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.749967 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5b98d8c596-zwnw4"] Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.753904 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77cb67458c-9lqdg"] Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.821618 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e73d6e37-1726-4ab2-bbdf-85db37463f8e-serving-cert\") pod \"controller-manager-5b98d8c596-zwnw4\" (UID: \"e73d6e37-1726-4ab2-bbdf-85db37463f8e\") " pod="openshift-controller-manager/controller-manager-5b98d8c596-zwnw4" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.821663 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9dd984d9-0f82-4f85-8a64-db3960dfa16a-client-ca\") pod \"route-controller-manager-77cb67458c-9lqdg\" (UID: \"9dd984d9-0f82-4f85-8a64-db3960dfa16a\") " pod="openshift-route-controller-manager/route-controller-manager-77cb67458c-9lqdg" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.821688 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e73d6e37-1726-4ab2-bbdf-85db37463f8e-config\") pod \"controller-manager-5b98d8c596-zwnw4\" (UID: \"e73d6e37-1726-4ab2-bbdf-85db37463f8e\") " pod="openshift-controller-manager/controller-manager-5b98d8c596-zwnw4" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.821704 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/e73d6e37-1726-4ab2-bbdf-85db37463f8e-proxy-ca-bundles\") pod \"controller-manager-5b98d8c596-zwnw4\" (UID: \"e73d6e37-1726-4ab2-bbdf-85db37463f8e\") " pod="openshift-controller-manager/controller-manager-5b98d8c596-zwnw4" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.821734 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9dd984d9-0f82-4f85-8a64-db3960dfa16a-serving-cert\") pod \"route-controller-manager-77cb67458c-9lqdg\" (UID: \"9dd984d9-0f82-4f85-8a64-db3960dfa16a\") " pod="openshift-route-controller-manager/route-controller-manager-77cb67458c-9lqdg" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.821832 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9dd984d9-0f82-4f85-8a64-db3960dfa16a-config\") pod \"route-controller-manager-77cb67458c-9lqdg\" (UID: \"9dd984d9-0f82-4f85-8a64-db3960dfa16a\") " pod="openshift-route-controller-manager/route-controller-manager-77cb67458c-9lqdg" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.821852 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e73d6e37-1726-4ab2-bbdf-85db37463f8e-client-ca\") pod \"controller-manager-5b98d8c596-zwnw4\" (UID: \"e73d6e37-1726-4ab2-bbdf-85db37463f8e\") " pod="openshift-controller-manager/controller-manager-5b98d8c596-zwnw4" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.821881 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvjf2\" (UniqueName: \"kubernetes.io/projected/e73d6e37-1726-4ab2-bbdf-85db37463f8e-kube-api-access-dvjf2\") pod \"controller-manager-5b98d8c596-zwnw4\" (UID: \"e73d6e37-1726-4ab2-bbdf-85db37463f8e\") " pod="openshift-controller-manager/controller-manager-5b98d8c596-zwnw4" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.821898 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9xh4\" (UniqueName: \"kubernetes.io/projected/9dd984d9-0f82-4f85-8a64-db3960dfa16a-kube-api-access-q9xh4\") pod \"route-controller-manager-77cb67458c-9lqdg\" (UID: \"9dd984d9-0f82-4f85-8a64-db3960dfa16a\") " pod="openshift-route-controller-manager/route-controller-manager-77cb67458c-9lqdg" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.923136 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e73d6e37-1726-4ab2-bbdf-85db37463f8e-serving-cert\") pod \"controller-manager-5b98d8c596-zwnw4\" (UID: \"e73d6e37-1726-4ab2-bbdf-85db37463f8e\") " pod="openshift-controller-manager/controller-manager-5b98d8c596-zwnw4" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.923191 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9dd984d9-0f82-4f85-8a64-db3960dfa16a-client-ca\") pod \"route-controller-manager-77cb67458c-9lqdg\" (UID: \"9dd984d9-0f82-4f85-8a64-db3960dfa16a\") " pod="openshift-route-controller-manager/route-controller-manager-77cb67458c-9lqdg" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.923214 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e73d6e37-1726-4ab2-bbdf-85db37463f8e-config\") pod \"controller-manager-5b98d8c596-zwnw4\" (UID: \"e73d6e37-1726-4ab2-bbdf-85db37463f8e\") " pod="openshift-controller-manager/controller-manager-5b98d8c596-zwnw4" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.923231 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e73d6e37-1726-4ab2-bbdf-85db37463f8e-proxy-ca-bundles\") pod \"controller-manager-5b98d8c596-zwnw4\" (UID: \"e73d6e37-1726-4ab2-bbdf-85db37463f8e\") " pod="openshift-controller-manager/controller-manager-5b98d8c596-zwnw4" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.923261 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9dd984d9-0f82-4f85-8a64-db3960dfa16a-serving-cert\") pod \"route-controller-manager-77cb67458c-9lqdg\" (UID: \"9dd984d9-0f82-4f85-8a64-db3960dfa16a\") " pod="openshift-route-controller-manager/route-controller-manager-77cb67458c-9lqdg" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.923280 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9dd984d9-0f82-4f85-8a64-db3960dfa16a-config\") pod \"route-controller-manager-77cb67458c-9lqdg\" (UID: \"9dd984d9-0f82-4f85-8a64-db3960dfa16a\") " pod="openshift-route-controller-manager/route-controller-manager-77cb67458c-9lqdg" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.923297 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e73d6e37-1726-4ab2-bbdf-85db37463f8e-client-ca\") pod \"controller-manager-5b98d8c596-zwnw4\" (UID: \"e73d6e37-1726-4ab2-bbdf-85db37463f8e\") " pod="openshift-controller-manager/controller-manager-5b98d8c596-zwnw4" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.923327 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvjf2\" (UniqueName: \"kubernetes.io/projected/e73d6e37-1726-4ab2-bbdf-85db37463f8e-kube-api-access-dvjf2\") pod \"controller-manager-5b98d8c596-zwnw4\" (UID: \"e73d6e37-1726-4ab2-bbdf-85db37463f8e\") " pod="openshift-controller-manager/controller-manager-5b98d8c596-zwnw4" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.923343 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9xh4\" (UniqueName: \"kubernetes.io/projected/9dd984d9-0f82-4f85-8a64-db3960dfa16a-kube-api-access-q9xh4\") pod \"route-controller-manager-77cb67458c-9lqdg\" (UID: \"9dd984d9-0f82-4f85-8a64-db3960dfa16a\") " pod="openshift-route-controller-manager/route-controller-manager-77cb67458c-9lqdg" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.924687 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9dd984d9-0f82-4f85-8a64-db3960dfa16a-client-ca\") pod \"route-controller-manager-77cb67458c-9lqdg\" (UID: \"9dd984d9-0f82-4f85-8a64-db3960dfa16a\") " pod="openshift-route-controller-manager/route-controller-manager-77cb67458c-9lqdg" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.925671 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9dd984d9-0f82-4f85-8a64-db3960dfa16a-config\") pod \"route-controller-manager-77cb67458c-9lqdg\" (UID: 
\"9dd984d9-0f82-4f85-8a64-db3960dfa16a\") " pod="openshift-route-controller-manager/route-controller-manager-77cb67458c-9lqdg" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.926292 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e73d6e37-1726-4ab2-bbdf-85db37463f8e-client-ca\") pod \"controller-manager-5b98d8c596-zwnw4\" (UID: \"e73d6e37-1726-4ab2-bbdf-85db37463f8e\") " pod="openshift-controller-manager/controller-manager-5b98d8c596-zwnw4" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.927357 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e73d6e37-1726-4ab2-bbdf-85db37463f8e-config\") pod \"controller-manager-5b98d8c596-zwnw4\" (UID: \"e73d6e37-1726-4ab2-bbdf-85db37463f8e\") " pod="openshift-controller-manager/controller-manager-5b98d8c596-zwnw4" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.927722 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e73d6e37-1726-4ab2-bbdf-85db37463f8e-proxy-ca-bundles\") pod \"controller-manager-5b98d8c596-zwnw4\" (UID: \"e73d6e37-1726-4ab2-bbdf-85db37463f8e\") " pod="openshift-controller-manager/controller-manager-5b98d8c596-zwnw4" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.929939 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9dd984d9-0f82-4f85-8a64-db3960dfa16a-serving-cert\") pod \"route-controller-manager-77cb67458c-9lqdg\" (UID: \"9dd984d9-0f82-4f85-8a64-db3960dfa16a\") " pod="openshift-route-controller-manager/route-controller-manager-77cb67458c-9lqdg" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.932914 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e73d6e37-1726-4ab2-bbdf-85db37463f8e-serving-cert\") pod \"controller-manager-5b98d8c596-zwnw4\" (UID: \"e73d6e37-1726-4ab2-bbdf-85db37463f8e\") " pod="openshift-controller-manager/controller-manager-5b98d8c596-zwnw4" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.940798 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9xh4\" (UniqueName: \"kubernetes.io/projected/9dd984d9-0f82-4f85-8a64-db3960dfa16a-kube-api-access-q9xh4\") pod \"route-controller-manager-77cb67458c-9lqdg\" (UID: \"9dd984d9-0f82-4f85-8a64-db3960dfa16a\") " pod="openshift-route-controller-manager/route-controller-manager-77cb67458c-9lqdg" Dec 04 17:34:19 crc kubenswrapper[4631]: I1204 17:34:19.940993 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvjf2\" (UniqueName: \"kubernetes.io/projected/e73d6e37-1726-4ab2-bbdf-85db37463f8e-kube-api-access-dvjf2\") pod \"controller-manager-5b98d8c596-zwnw4\" (UID: \"e73d6e37-1726-4ab2-bbdf-85db37463f8e\") " pod="openshift-controller-manager/controller-manager-5b98d8c596-zwnw4" Dec 04 17:34:20 crc kubenswrapper[4631]: I1204 17:34:20.050920 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5b98d8c596-zwnw4" Dec 04 17:34:20 crc kubenswrapper[4631]: I1204 17:34:20.063122 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-77cb67458c-9lqdg" Dec 04 17:34:20 crc kubenswrapper[4631]: I1204 17:34:20.246244 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75060739-d159-4122-9664-852aebba7825" path="/var/lib/kubelet/pods/75060739-d159-4122-9664-852aebba7825/volumes" Dec 04 17:34:20 crc kubenswrapper[4631]: I1204 17:34:20.247418 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c" path="/var/lib/kubelet/pods/869d5b3d-1f2e-4ba1-a2e7-f1e726015d1c/volumes" Dec 04 17:34:20 crc kubenswrapper[4631]: I1204 17:34:20.271716 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5b98d8c596-zwnw4"] Dec 04 17:34:20 crc kubenswrapper[4631]: I1204 17:34:20.314889 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77cb67458c-9lqdg"] Dec 04 17:34:20 crc kubenswrapper[4631]: W1204 17:34:20.319481 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9dd984d9_0f82_4f85_8a64_db3960dfa16a.slice/crio-271ba1c165c3b319ae29d2091950e7b9d982c2748074997324af3d42995a9aa2 WatchSource:0}: Error finding container 271ba1c165c3b319ae29d2091950e7b9d982c2748074997324af3d42995a9aa2: Status 404 returned error can't find the container with id 271ba1c165c3b319ae29d2091950e7b9d982c2748074997324af3d42995a9aa2 Dec 04 17:34:20 crc kubenswrapper[4631]: I1204 17:34:20.360725 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-77cb67458c-9lqdg" event={"ID":"9dd984d9-0f82-4f85-8a64-db3960dfa16a","Type":"ContainerStarted","Data":"271ba1c165c3b319ae29d2091950e7b9d982c2748074997324af3d42995a9aa2"} Dec 04 17:34:20 crc kubenswrapper[4631]: I1204 17:34:20.361639 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5b98d8c596-zwnw4" event={"ID":"e73d6e37-1726-4ab2-bbdf-85db37463f8e","Type":"ContainerStarted","Data":"102db7d3212d5b0696f7152c14086c7243f63f0f33279622df3f66437d9440f7"} Dec 04 17:34:21 crc kubenswrapper[4631]: I1204 17:34:21.371267 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-77cb67458c-9lqdg" event={"ID":"9dd984d9-0f82-4f85-8a64-db3960dfa16a","Type":"ContainerStarted","Data":"be6ff42b9dc5e54a160792040b6cfc6d59adcd06af0d0163725391f7130d1d6b"} Dec 04 17:34:21 crc kubenswrapper[4631]: I1204 17:34:21.372187 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-77cb67458c-9lqdg" Dec 04 17:34:21 crc kubenswrapper[4631]: I1204 17:34:21.374550 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5b98d8c596-zwnw4" event={"ID":"e73d6e37-1726-4ab2-bbdf-85db37463f8e","Type":"ContainerStarted","Data":"5c2a27aef8ef39ded4a73e042678b3a5f6e2c42a1d4d6c4543bb5a466908a21e"} Dec 04 17:34:21 crc kubenswrapper[4631]: I1204 17:34:21.374904 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5b98d8c596-zwnw4" Dec 04 17:34:21 crc kubenswrapper[4631]: I1204 17:34:21.378953 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-route-controller-manager/route-controller-manager-77cb67458c-9lqdg" Dec 04 17:34:21 crc kubenswrapper[4631]: I1204 17:34:21.384483 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5b98d8c596-zwnw4" Dec 04 17:34:21 crc kubenswrapper[4631]: I1204 17:34:21.404216 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-77cb67458c-9lqdg" podStartSLOduration=4.404193854 podStartE2EDuration="4.404193854s" podCreationTimestamp="2025-12-04 17:34:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:34:21.398446514 +0000 UTC m=+391.430688532" watchObservedRunningTime="2025-12-04 17:34:21.404193854 +0000 UTC m=+391.436435862" Dec 04 17:34:21 crc kubenswrapper[4631]: I1204 17:34:21.421587 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5b98d8c596-zwnw4" podStartSLOduration=4.421559626 podStartE2EDuration="4.421559626s" podCreationTimestamp="2025-12-04 17:34:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:34:21.420261548 +0000 UTC m=+391.452503566" watchObservedRunningTime="2025-12-04 17:34:21.421559626 +0000 UTC m=+391.453801664" Dec 04 17:34:36 crc kubenswrapper[4631]: I1204 17:34:36.023024 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 17:34:36 crc kubenswrapper[4631]: I1204 17:34:36.023980 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.197541 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-rrg2v"] Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.199174 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.219677 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-rrg2v"] Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.355900 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6t9ts\" (UniqueName: \"kubernetes.io/projected/0b1b0210-f4c9-4862-ad05-58b40a3dc9b6-kube-api-access-6t9ts\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.356113 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0b1b0210-f4c9-4862-ad05-58b40a3dc9b6-bound-sa-token\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.356200 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0b1b0210-f4c9-4862-ad05-58b40a3dc9b6-registry-tls\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.356252 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0b1b0210-f4c9-4862-ad05-58b40a3dc9b6-ca-trust-extracted\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.356326 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.356489 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0b1b0210-f4c9-4862-ad05-58b40a3dc9b6-trusted-ca\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.356658 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0b1b0210-f4c9-4862-ad05-58b40a3dc9b6-registry-certificates\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.356899 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: 
\"kubernetes.io/secret/0b1b0210-f4c9-4862-ad05-58b40a3dc9b6-installation-pull-secrets\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.390834 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.458973 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0b1b0210-f4c9-4862-ad05-58b40a3dc9b6-registry-certificates\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.459054 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0b1b0210-f4c9-4862-ad05-58b40a3dc9b6-installation-pull-secrets\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.459123 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6t9ts\" (UniqueName: \"kubernetes.io/projected/0b1b0210-f4c9-4862-ad05-58b40a3dc9b6-kube-api-access-6t9ts\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.459152 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0b1b0210-f4c9-4862-ad05-58b40a3dc9b6-bound-sa-token\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.459192 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0b1b0210-f4c9-4862-ad05-58b40a3dc9b6-registry-tls\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.459218 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0b1b0210-f4c9-4862-ad05-58b40a3dc9b6-ca-trust-extracted\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.459246 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0b1b0210-f4c9-4862-ad05-58b40a3dc9b6-trusted-ca\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.461228 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0b1b0210-f4c9-4862-ad05-58b40a3dc9b6-registry-certificates\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.461404 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0b1b0210-f4c9-4862-ad05-58b40a3dc9b6-trusted-ca\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.462055 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0b1b0210-f4c9-4862-ad05-58b40a3dc9b6-ca-trust-extracted\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.476575 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0b1b0210-f4c9-4862-ad05-58b40a3dc9b6-installation-pull-secrets\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.484834 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0b1b0210-f4c9-4862-ad05-58b40a3dc9b6-registry-tls\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.500400 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6t9ts\" (UniqueName: \"kubernetes.io/projected/0b1b0210-f4c9-4862-ad05-58b40a3dc9b6-kube-api-access-6t9ts\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.504092 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0b1b0210-f4c9-4862-ad05-58b40a3dc9b6-bound-sa-token\") pod \"image-registry-66df7c8f76-rrg2v\" (UID: \"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6\") " pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.521016 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:43 crc kubenswrapper[4631]: I1204 17:34:43.934607 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-rrg2v"] Dec 04 17:34:43 crc kubenswrapper[4631]: W1204 17:34:43.947527 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b1b0210_f4c9_4862_ad05_58b40a3dc9b6.slice/crio-a0673ecf013741d5d64f8769522ac7e42d5a73e42891a2b835d18f635ee9e6fc WatchSource:0}: Error finding container a0673ecf013741d5d64f8769522ac7e42d5a73e42891a2b835d18f635ee9e6fc: Status 404 returned error can't find the container with id a0673ecf013741d5d64f8769522ac7e42d5a73e42891a2b835d18f635ee9e6fc Dec 04 17:34:44 crc kubenswrapper[4631]: I1204 17:34:44.517481 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" event={"ID":"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6","Type":"ContainerStarted","Data":"a3eb74dfd1fa2b0738abd627267522abb0ab9237c712edac47303f1f556c9f92"} Dec 04 17:34:44 crc kubenswrapper[4631]: I1204 17:34:44.517526 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" event={"ID":"0b1b0210-f4c9-4862-ad05-58b40a3dc9b6","Type":"ContainerStarted","Data":"a0673ecf013741d5d64f8769522ac7e42d5a73e42891a2b835d18f635ee9e6fc"} Dec 04 17:34:44 crc kubenswrapper[4631]: I1204 17:34:44.517624 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:34:44 crc kubenswrapper[4631]: I1204 17:34:44.539551 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" podStartSLOduration=1.539532514 podStartE2EDuration="1.539532514s" podCreationTimestamp="2025-12-04 17:34:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:34:44.535828307 +0000 UTC m=+414.568070325" watchObservedRunningTime="2025-12-04 17:34:44.539532514 +0000 UTC m=+414.571774522" Dec 04 17:34:49 crc kubenswrapper[4631]: I1204 17:34:49.992742 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l85pg"] Dec 04 17:34:49 crc kubenswrapper[4631]: I1204 17:34:49.993697 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-l85pg" podUID="5cb73c44-e995-4e73-9bd3-422c00633ddf" containerName="registry-server" containerID="cri-o://a13e5c850faba8465998f86e2749c956e15d3207ae916b435edd13a54ae9e360" gracePeriod=2 Dec 04 17:34:50 crc kubenswrapper[4631]: E1204 17:34:50.210642 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a13e5c850faba8465998f86e2749c956e15d3207ae916b435edd13a54ae9e360 is running failed: container process not found" containerID="a13e5c850faba8465998f86e2749c956e15d3207ae916b435edd13a54ae9e360" cmd=["grpc_health_probe","-addr=:50051"] Dec 04 17:34:50 crc kubenswrapper[4631]: E1204 17:34:50.211180 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a13e5c850faba8465998f86e2749c956e15d3207ae916b435edd13a54ae9e360 is running failed: 
container process not found" containerID="a13e5c850faba8465998f86e2749c956e15d3207ae916b435edd13a54ae9e360" cmd=["grpc_health_probe","-addr=:50051"] Dec 04 17:34:50 crc kubenswrapper[4631]: E1204 17:34:50.218544 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a13e5c850faba8465998f86e2749c956e15d3207ae916b435edd13a54ae9e360 is running failed: container process not found" containerID="a13e5c850faba8465998f86e2749c956e15d3207ae916b435edd13a54ae9e360" cmd=["grpc_health_probe","-addr=:50051"] Dec 04 17:34:50 crc kubenswrapper[4631]: E1204 17:34:50.218627 4631 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a13e5c850faba8465998f86e2749c956e15d3207ae916b435edd13a54ae9e360 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-l85pg" podUID="5cb73c44-e995-4e73-9bd3-422c00633ddf" containerName="registry-server" Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.530953 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l85pg" Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.554097 4631 generic.go:334] "Generic (PLEG): container finished" podID="5cb73c44-e995-4e73-9bd3-422c00633ddf" containerID="a13e5c850faba8465998f86e2749c956e15d3207ae916b435edd13a54ae9e360" exitCode=0 Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.554153 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l85pg" event={"ID":"5cb73c44-e995-4e73-9bd3-422c00633ddf","Type":"ContainerDied","Data":"a13e5c850faba8465998f86e2749c956e15d3207ae916b435edd13a54ae9e360"} Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.554183 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l85pg" event={"ID":"5cb73c44-e995-4e73-9bd3-422c00633ddf","Type":"ContainerDied","Data":"5ba12a072c105c566b67df94f434ddf3c098765b38c4cb7cf900a5b304f4e02e"} Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.554209 4631 scope.go:117] "RemoveContainer" containerID="a13e5c850faba8465998f86e2749c956e15d3207ae916b435edd13a54ae9e360" Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.554194 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-l85pg" Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.574970 4631 scope.go:117] "RemoveContainer" containerID="3b8f8d6904928b5ed8ff52dc263e96b54f8c12021d69df5f307ca2441ec5b868" Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.599656 4631 scope.go:117] "RemoveContainer" containerID="1b7c5e8d67d0de5a953d8f1de0e0c9f807b981693ecf440796bedaeea987a8e9" Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.618745 4631 scope.go:117] "RemoveContainer" containerID="a13e5c850faba8465998f86e2749c956e15d3207ae916b435edd13a54ae9e360" Dec 04 17:34:50 crc kubenswrapper[4631]: E1204 17:34:50.619197 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a13e5c850faba8465998f86e2749c956e15d3207ae916b435edd13a54ae9e360\": container with ID starting with a13e5c850faba8465998f86e2749c956e15d3207ae916b435edd13a54ae9e360 not found: ID does not exist" containerID="a13e5c850faba8465998f86e2749c956e15d3207ae916b435edd13a54ae9e360" Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.619232 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a13e5c850faba8465998f86e2749c956e15d3207ae916b435edd13a54ae9e360"} err="failed to get container status \"a13e5c850faba8465998f86e2749c956e15d3207ae916b435edd13a54ae9e360\": rpc error: code = NotFound desc = could not find container \"a13e5c850faba8465998f86e2749c956e15d3207ae916b435edd13a54ae9e360\": container with ID starting with a13e5c850faba8465998f86e2749c956e15d3207ae916b435edd13a54ae9e360 not found: ID does not exist" Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.619256 4631 scope.go:117] "RemoveContainer" containerID="3b8f8d6904928b5ed8ff52dc263e96b54f8c12021d69df5f307ca2441ec5b868" Dec 04 17:34:50 crc kubenswrapper[4631]: E1204 17:34:50.619628 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b8f8d6904928b5ed8ff52dc263e96b54f8c12021d69df5f307ca2441ec5b868\": container with ID starting with 3b8f8d6904928b5ed8ff52dc263e96b54f8c12021d69df5f307ca2441ec5b868 not found: ID does not exist" containerID="3b8f8d6904928b5ed8ff52dc263e96b54f8c12021d69df5f307ca2441ec5b868" Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.619688 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b8f8d6904928b5ed8ff52dc263e96b54f8c12021d69df5f307ca2441ec5b868"} err="failed to get container status \"3b8f8d6904928b5ed8ff52dc263e96b54f8c12021d69df5f307ca2441ec5b868\": rpc error: code = NotFound desc = could not find container \"3b8f8d6904928b5ed8ff52dc263e96b54f8c12021d69df5f307ca2441ec5b868\": container with ID starting with 3b8f8d6904928b5ed8ff52dc263e96b54f8c12021d69df5f307ca2441ec5b868 not found: ID does not exist" Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.619734 4631 scope.go:117] "RemoveContainer" containerID="1b7c5e8d67d0de5a953d8f1de0e0c9f807b981693ecf440796bedaeea987a8e9" Dec 04 17:34:50 crc kubenswrapper[4631]: E1204 17:34:50.620164 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b7c5e8d67d0de5a953d8f1de0e0c9f807b981693ecf440796bedaeea987a8e9\": container with ID starting with 1b7c5e8d67d0de5a953d8f1de0e0c9f807b981693ecf440796bedaeea987a8e9 not found: ID does not exist" containerID="1b7c5e8d67d0de5a953d8f1de0e0c9f807b981693ecf440796bedaeea987a8e9" 
Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.620198 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b7c5e8d67d0de5a953d8f1de0e0c9f807b981693ecf440796bedaeea987a8e9"} err="failed to get container status \"1b7c5e8d67d0de5a953d8f1de0e0c9f807b981693ecf440796bedaeea987a8e9\": rpc error: code = NotFound desc = could not find container \"1b7c5e8d67d0de5a953d8f1de0e0c9f807b981693ecf440796bedaeea987a8e9\": container with ID starting with 1b7c5e8d67d0de5a953d8f1de0e0c9f807b981693ecf440796bedaeea987a8e9 not found: ID does not exist"
Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.667347 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cb73c44-e995-4e73-9bd3-422c00633ddf-utilities\") pod \"5cb73c44-e995-4e73-9bd3-422c00633ddf\" (UID: \"5cb73c44-e995-4e73-9bd3-422c00633ddf\") "
Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.667430 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cb73c44-e995-4e73-9bd3-422c00633ddf-catalog-content\") pod \"5cb73c44-e995-4e73-9bd3-422c00633ddf\" (UID: \"5cb73c44-e995-4e73-9bd3-422c00633ddf\") "
Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.667502 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74nx8\" (UniqueName: \"kubernetes.io/projected/5cb73c44-e995-4e73-9bd3-422c00633ddf-kube-api-access-74nx8\") pod \"5cb73c44-e995-4e73-9bd3-422c00633ddf\" (UID: \"5cb73c44-e995-4e73-9bd3-422c00633ddf\") "
Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.668510 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cb73c44-e995-4e73-9bd3-422c00633ddf-utilities" (OuterVolumeSpecName: "utilities") pod "5cb73c44-e995-4e73-9bd3-422c00633ddf" (UID: "5cb73c44-e995-4e73-9bd3-422c00633ddf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.669251 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cb73c44-e995-4e73-9bd3-422c00633ddf-utilities\") on node \"crc\" DevicePath \"\""
Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.675841 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cb73c44-e995-4e73-9bd3-422c00633ddf-kube-api-access-74nx8" (OuterVolumeSpecName: "kube-api-access-74nx8") pod "5cb73c44-e995-4e73-9bd3-422c00633ddf" (UID: "5cb73c44-e995-4e73-9bd3-422c00633ddf"). InnerVolumeSpecName "kube-api-access-74nx8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.713439 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cb73c44-e995-4e73-9bd3-422c00633ddf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5cb73c44-e995-4e73-9bd3-422c00633ddf" (UID: "5cb73c44-e995-4e73-9bd3-422c00633ddf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.771014 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cb73c44-e995-4e73-9bd3-422c00633ddf-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.771054 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74nx8\" (UniqueName: \"kubernetes.io/projected/5cb73c44-e995-4e73-9bd3-422c00633ddf-kube-api-access-74nx8\") on node \"crc\" DevicePath \"\""
Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.886273 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l85pg"]
Dec 04 17:34:50 crc kubenswrapper[4631]: I1204 17:34:50.889750 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-l85pg"]
Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.025715 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bmh96"]
Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.026665 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-bmh96" podUID="e633bf80-04ad-4770-abc5-5d453077543c" containerName="registry-server" containerID="cri-o://67a93955a8caa57944cee1cfde94a8f78ea5ef57934a0946566c57d489527adf" gracePeriod=30
Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.046703 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n9bhz"]
Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.046935 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-n9bhz" podUID="885a0356-3029-482f-b3b5-3caa01e19c62" containerName="registry-server" containerID="cri-o://d7159ffc18c8ec88c3878c3576dda83febe7a006e45acb65f2dd5ddbfa777e76" gracePeriod=30
Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.076871 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d97wj"]
Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.077111 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-d97wj" podUID="dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a" containerName="marketplace-operator" containerID="cri-o://440fd9893a7c425a59523910df10ddb453c974398e8d56bfee3bbd2be62fa9f1" gracePeriod=30
Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.108048 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nx6gn"]
Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.108311 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nx6gn" podUID="29451954-9708-42c3-87aa-e28d06a5c640" containerName="registry-server" containerID="cri-o://f5b443ba2e30f6566c7fa69c834a67662d42ac6e29b8726f82ce26c9e69d9b11" gracePeriod=30
Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.117418 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-54924"]
Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.117678 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-54924" podUID="2ff644ad-bdd2-4306-918d-fe5a0bfbc964" containerName="registry-server" containerID="cri-o://a996ce528f4f8cde7bb3cdcaceb578f8c4f917bb4eaba7f5c8ac04585d0a2c47" gracePeriod=30
Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.122709 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-lpl45"]
Dec 04 17:34:52 crc kubenswrapper[4631]: E1204 17:34:52.122975 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cb73c44-e995-4e73-9bd3-422c00633ddf" containerName="extract-utilities"
Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.122988 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cb73c44-e995-4e73-9bd3-422c00633ddf" containerName="extract-utilities"
Dec 04 17:34:52 crc kubenswrapper[4631]: E1204 17:34:52.123001 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cb73c44-e995-4e73-9bd3-422c00633ddf" containerName="registry-server"
Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.123009 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cb73c44-e995-4e73-9bd3-422c00633ddf" containerName="registry-server"
Dec 04 17:34:52 crc kubenswrapper[4631]: E1204 17:34:52.123021 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cb73c44-e995-4e73-9bd3-422c00633ddf" containerName="extract-content"
Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.123030 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cb73c44-e995-4e73-9bd3-422c00633ddf" containerName="extract-content"
Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.123177 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cb73c44-e995-4e73-9bd3-422c00633ddf" containerName="registry-server"
Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.124592 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-lpl45"
Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.149831 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-lpl45"]
Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.248600 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cb73c44-e995-4e73-9bd3-422c00633ddf" path="/var/lib/kubelet/pods/5cb73c44-e995-4e73-9bd3-422c00633ddf/volumes"
Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.296992 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsw86\" (UniqueName: \"kubernetes.io/projected/68b9b122-03d1-41c7-8910-62826c1eedbb-kube-api-access-nsw86\") pod \"marketplace-operator-79b997595-lpl45\" (UID: \"68b9b122-03d1-41c7-8910-62826c1eedbb\") " pod="openshift-marketplace/marketplace-operator-79b997595-lpl45"
Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.297127 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/68b9b122-03d1-41c7-8910-62826c1eedbb-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-lpl45\" (UID: \"68b9b122-03d1-41c7-8910-62826c1eedbb\") " pod="openshift-marketplace/marketplace-operator-79b997595-lpl45"
Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.297154 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/68b9b122-03d1-41c7-8910-62826c1eedbb-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-lpl45\" (UID: \"68b9b122-03d1-41c7-8910-62826c1eedbb\") " pod="openshift-marketplace/marketplace-operator-79b997595-lpl45"
Dec 04 17:34:52 crc kubenswrapper[4631]: E1204 17:34:52.307184 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f5b443ba2e30f6566c7fa69c834a67662d42ac6e29b8726f82ce26c9e69d9b11 is running failed: container process not found" containerID="f5b443ba2e30f6566c7fa69c834a67662d42ac6e29b8726f82ce26c9e69d9b11" cmd=["grpc_health_probe","-addr=:50051"]
Dec 04 17:34:52 crc kubenswrapper[4631]: E1204 17:34:52.307926 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f5b443ba2e30f6566c7fa69c834a67662d42ac6e29b8726f82ce26c9e69d9b11 is running failed: container process not found" containerID="f5b443ba2e30f6566c7fa69c834a67662d42ac6e29b8726f82ce26c9e69d9b11" cmd=["grpc_health_probe","-addr=:50051"]
Dec 04 17:34:52 crc kubenswrapper[4631]: E1204 17:34:52.308425 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f5b443ba2e30f6566c7fa69c834a67662d42ac6e29b8726f82ce26c9e69d9b11 is running failed: container process not found" containerID="f5b443ba2e30f6566c7fa69c834a67662d42ac6e29b8726f82ce26c9e69d9b11" cmd=["grpc_health_probe","-addr=:50051"]
Dec 04 17:34:52 crc kubenswrapper[4631]: E1204 17:34:52.308476 4631 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f5b443ba2e30f6566c7fa69c834a67662d42ac6e29b8726f82ce26c9e69d9b11 is running failed: container process not found"
probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-nx6gn" podUID="29451954-9708-42c3-87aa-e28d06a5c640" containerName="registry-server" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.399178 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/68b9b122-03d1-41c7-8910-62826c1eedbb-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-lpl45\" (UID: \"68b9b122-03d1-41c7-8910-62826c1eedbb\") " pod="openshift-marketplace/marketplace-operator-79b997595-lpl45" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.399236 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/68b9b122-03d1-41c7-8910-62826c1eedbb-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-lpl45\" (UID: \"68b9b122-03d1-41c7-8910-62826c1eedbb\") " pod="openshift-marketplace/marketplace-operator-79b997595-lpl45" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.399278 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsw86\" (UniqueName: \"kubernetes.io/projected/68b9b122-03d1-41c7-8910-62826c1eedbb-kube-api-access-nsw86\") pod \"marketplace-operator-79b997595-lpl45\" (UID: \"68b9b122-03d1-41c7-8910-62826c1eedbb\") " pod="openshift-marketplace/marketplace-operator-79b997595-lpl45" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.401564 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/68b9b122-03d1-41c7-8910-62826c1eedbb-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-lpl45\" (UID: \"68b9b122-03d1-41c7-8910-62826c1eedbb\") " pod="openshift-marketplace/marketplace-operator-79b997595-lpl45" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.420056 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsw86\" (UniqueName: \"kubernetes.io/projected/68b9b122-03d1-41c7-8910-62826c1eedbb-kube-api-access-nsw86\") pod \"marketplace-operator-79b997595-lpl45\" (UID: \"68b9b122-03d1-41c7-8910-62826c1eedbb\") " pod="openshift-marketplace/marketplace-operator-79b997595-lpl45" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.420227 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/68b9b122-03d1-41c7-8910-62826c1eedbb-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-lpl45\" (UID: \"68b9b122-03d1-41c7-8910-62826c1eedbb\") " pod="openshift-marketplace/marketplace-operator-79b997595-lpl45" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.450547 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-lpl45" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.597926 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-n9bhz" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.602509 4631 generic.go:334] "Generic (PLEG): container finished" podID="885a0356-3029-482f-b3b5-3caa01e19c62" containerID="d7159ffc18c8ec88c3878c3576dda83febe7a006e45acb65f2dd5ddbfa777e76" exitCode=0 Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.602579 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9bhz" event={"ID":"885a0356-3029-482f-b3b5-3caa01e19c62","Type":"ContainerDied","Data":"d7159ffc18c8ec88c3878c3576dda83febe7a006e45acb65f2dd5ddbfa777e76"} Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.602604 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9bhz" event={"ID":"885a0356-3029-482f-b3b5-3caa01e19c62","Type":"ContainerDied","Data":"e89f4bb68089b566fb45b4a334d1a7d58645ab1bba453540506ed6f288befb36"} Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.602621 4631 scope.go:117] "RemoveContainer" containerID="d7159ffc18c8ec88c3878c3576dda83febe7a006e45acb65f2dd5ddbfa777e76" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.622096 4631 generic.go:334] "Generic (PLEG): container finished" podID="dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a" containerID="440fd9893a7c425a59523910df10ddb453c974398e8d56bfee3bbd2be62fa9f1" exitCode=0 Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.622190 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-d97wj" event={"ID":"dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a","Type":"ContainerDied","Data":"440fd9893a7c425a59523910df10ddb453c974398e8d56bfee3bbd2be62fa9f1"} Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.639849 4631 generic.go:334] "Generic (PLEG): container finished" podID="e633bf80-04ad-4770-abc5-5d453077543c" containerID="67a93955a8caa57944cee1cfde94a8f78ea5ef57934a0946566c57d489527adf" exitCode=0 Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.639916 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bmh96" event={"ID":"e633bf80-04ad-4770-abc5-5d453077543c","Type":"ContainerDied","Data":"67a93955a8caa57944cee1cfde94a8f78ea5ef57934a0946566c57d489527adf"} Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.642775 4631 generic.go:334] "Generic (PLEG): container finished" podID="2ff644ad-bdd2-4306-918d-fe5a0bfbc964" containerID="a996ce528f4f8cde7bb3cdcaceb578f8c4f917bb4eaba7f5c8ac04585d0a2c47" exitCode=0 Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.642819 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-54924" event={"ID":"2ff644ad-bdd2-4306-918d-fe5a0bfbc964","Type":"ContainerDied","Data":"a996ce528f4f8cde7bb3cdcaceb578f8c4f917bb4eaba7f5c8ac04585d0a2c47"} Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.644323 4631 generic.go:334] "Generic (PLEG): container finished" podID="29451954-9708-42c3-87aa-e28d06a5c640" containerID="f5b443ba2e30f6566c7fa69c834a67662d42ac6e29b8726f82ce26c9e69d9b11" exitCode=0 Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.644347 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nx6gn" event={"ID":"29451954-9708-42c3-87aa-e28d06a5c640","Type":"ContainerDied","Data":"f5b443ba2e30f6566c7fa69c834a67662d42ac6e29b8726f82ce26c9e69d9b11"} Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.682124 4631 scope.go:117] 
"RemoveContainer" containerID="452eb3f344fb58845f606c81534a76a66aad0d17be38f08fdd3252552542cf5d" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.704820 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/885a0356-3029-482f-b3b5-3caa01e19c62-utilities\") pod \"885a0356-3029-482f-b3b5-3caa01e19c62\" (UID: \"885a0356-3029-482f-b3b5-3caa01e19c62\") " Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.704914 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pbc2n\" (UniqueName: \"kubernetes.io/projected/885a0356-3029-482f-b3b5-3caa01e19c62-kube-api-access-pbc2n\") pod \"885a0356-3029-482f-b3b5-3caa01e19c62\" (UID: \"885a0356-3029-482f-b3b5-3caa01e19c62\") " Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.704961 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/885a0356-3029-482f-b3b5-3caa01e19c62-catalog-content\") pod \"885a0356-3029-482f-b3b5-3caa01e19c62\" (UID: \"885a0356-3029-482f-b3b5-3caa01e19c62\") " Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.706519 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/885a0356-3029-482f-b3b5-3caa01e19c62-utilities" (OuterVolumeSpecName: "utilities") pod "885a0356-3029-482f-b3b5-3caa01e19c62" (UID: "885a0356-3029-482f-b3b5-3caa01e19c62"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.722534 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/885a0356-3029-482f-b3b5-3caa01e19c62-kube-api-access-pbc2n" (OuterVolumeSpecName: "kube-api-access-pbc2n") pod "885a0356-3029-482f-b3b5-3caa01e19c62" (UID: "885a0356-3029-482f-b3b5-3caa01e19c62"). InnerVolumeSpecName "kube-api-access-pbc2n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.749643 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bmh96" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.782088 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/885a0356-3029-482f-b3b5-3caa01e19c62-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "885a0356-3029-482f-b3b5-3caa01e19c62" (UID: "885a0356-3029-482f-b3b5-3caa01e19c62"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.782267 4631 scope.go:117] "RemoveContainer" containerID="4df710cbb8fbf14e427d45ac8f9f634470cfe5209e7221d71fde297eb2064ec6" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.793154 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-d97wj" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.816124 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/885a0356-3029-482f-b3b5-3caa01e19c62-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.816160 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pbc2n\" (UniqueName: \"kubernetes.io/projected/885a0356-3029-482f-b3b5-3caa01e19c62-kube-api-access-pbc2n\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.816168 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/885a0356-3029-482f-b3b5-3caa01e19c62-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.837323 4631 scope.go:117] "RemoveContainer" containerID="d7159ffc18c8ec88c3878c3576dda83febe7a006e45acb65f2dd5ddbfa777e76" Dec 04 17:34:52 crc kubenswrapper[4631]: E1204 17:34:52.838114 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7159ffc18c8ec88c3878c3576dda83febe7a006e45acb65f2dd5ddbfa777e76\": container with ID starting with d7159ffc18c8ec88c3878c3576dda83febe7a006e45acb65f2dd5ddbfa777e76 not found: ID does not exist" containerID="d7159ffc18c8ec88c3878c3576dda83febe7a006e45acb65f2dd5ddbfa777e76" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.838146 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7159ffc18c8ec88c3878c3576dda83febe7a006e45acb65f2dd5ddbfa777e76"} err="failed to get container status \"d7159ffc18c8ec88c3878c3576dda83febe7a006e45acb65f2dd5ddbfa777e76\": rpc error: code = NotFound desc = could not find container \"d7159ffc18c8ec88c3878c3576dda83febe7a006e45acb65f2dd5ddbfa777e76\": container with ID starting with d7159ffc18c8ec88c3878c3576dda83febe7a006e45acb65f2dd5ddbfa777e76 not found: ID does not exist" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.838167 4631 scope.go:117] "RemoveContainer" containerID="452eb3f344fb58845f606c81534a76a66aad0d17be38f08fdd3252552542cf5d" Dec 04 17:34:52 crc kubenswrapper[4631]: E1204 17:34:52.838526 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"452eb3f344fb58845f606c81534a76a66aad0d17be38f08fdd3252552542cf5d\": container with ID starting with 452eb3f344fb58845f606c81534a76a66aad0d17be38f08fdd3252552542cf5d not found: ID does not exist" containerID="452eb3f344fb58845f606c81534a76a66aad0d17be38f08fdd3252552542cf5d" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.838541 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"452eb3f344fb58845f606c81534a76a66aad0d17be38f08fdd3252552542cf5d"} err="failed to get container status \"452eb3f344fb58845f606c81534a76a66aad0d17be38f08fdd3252552542cf5d\": rpc error: code = NotFound desc = could not find container \"452eb3f344fb58845f606c81534a76a66aad0d17be38f08fdd3252552542cf5d\": container with ID starting with 452eb3f344fb58845f606c81534a76a66aad0d17be38f08fdd3252552542cf5d not found: ID does not exist" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.838553 4631 scope.go:117] "RemoveContainer" containerID="4df710cbb8fbf14e427d45ac8f9f634470cfe5209e7221d71fde297eb2064ec6" 
Dec 04 17:34:52 crc kubenswrapper[4631]: E1204 17:34:52.838806 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4df710cbb8fbf14e427d45ac8f9f634470cfe5209e7221d71fde297eb2064ec6\": container with ID starting with 4df710cbb8fbf14e427d45ac8f9f634470cfe5209e7221d71fde297eb2064ec6 not found: ID does not exist" containerID="4df710cbb8fbf14e427d45ac8f9f634470cfe5209e7221d71fde297eb2064ec6" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.838822 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4df710cbb8fbf14e427d45ac8f9f634470cfe5209e7221d71fde297eb2064ec6"} err="failed to get container status \"4df710cbb8fbf14e427d45ac8f9f634470cfe5209e7221d71fde297eb2064ec6\": rpc error: code = NotFound desc = could not find container \"4df710cbb8fbf14e427d45ac8f9f634470cfe5209e7221d71fde297eb2064ec6\": container with ID starting with 4df710cbb8fbf14e427d45ac8f9f634470cfe5209e7221d71fde297eb2064ec6 not found: ID does not exist" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.855868 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nx6gn" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.881083 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-54924" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.918813 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e633bf80-04ad-4770-abc5-5d453077543c-catalog-content\") pod \"e633bf80-04ad-4770-abc5-5d453077543c\" (UID: \"e633bf80-04ad-4770-abc5-5d453077543c\") " Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.918890 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e633bf80-04ad-4770-abc5-5d453077543c-utilities\") pod \"e633bf80-04ad-4770-abc5-5d453077543c\" (UID: \"e633bf80-04ad-4770-abc5-5d453077543c\") " Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.918916 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a-marketplace-trusted-ca\") pod \"dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a\" (UID: \"dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a\") " Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.918967 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fnrc2\" (UniqueName: \"kubernetes.io/projected/dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a-kube-api-access-fnrc2\") pod \"dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a\" (UID: \"dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a\") " Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.919012 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-snnrg\" (UniqueName: \"kubernetes.io/projected/e633bf80-04ad-4770-abc5-5d453077543c-kube-api-access-snnrg\") pod \"e633bf80-04ad-4770-abc5-5d453077543c\" (UID: \"e633bf80-04ad-4770-abc5-5d453077543c\") " Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.919034 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a-marketplace-operator-metrics\") pod 
\"dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a\" (UID: \"dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a\") " Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.921924 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a" (UID: "dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.922936 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e633bf80-04ad-4770-abc5-5d453077543c-utilities" (OuterVolumeSpecName: "utilities") pod "e633bf80-04ad-4770-abc5-5d453077543c" (UID: "e633bf80-04ad-4770-abc5-5d453077543c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.924826 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e633bf80-04ad-4770-abc5-5d453077543c-kube-api-access-snnrg" (OuterVolumeSpecName: "kube-api-access-snnrg") pod "e633bf80-04ad-4770-abc5-5d453077543c" (UID: "e633bf80-04ad-4770-abc5-5d453077543c"). InnerVolumeSpecName "kube-api-access-snnrg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.925027 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a" (UID: "dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.926468 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a-kube-api-access-fnrc2" (OuterVolumeSpecName: "kube-api-access-fnrc2") pod "dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a" (UID: "dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a"). InnerVolumeSpecName "kube-api-access-fnrc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:34:52 crc kubenswrapper[4631]: I1204 17:34:52.965876 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e633bf80-04ad-4770-abc5-5d453077543c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e633bf80-04ad-4770-abc5-5d453077543c" (UID: "e633bf80-04ad-4770-abc5-5d453077543c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.002174 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-lpl45"] Dec 04 17:34:53 crc kubenswrapper[4631]: W1204 17:34:53.010560 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod68b9b122_03d1_41c7_8910_62826c1eedbb.slice/crio-17e7e4ff61820b9b345da99496308b570d1aa6b2bfb99177172fdcfd77286dde WatchSource:0}: Error finding container 17e7e4ff61820b9b345da99496308b570d1aa6b2bfb99177172fdcfd77286dde: Status 404 returned error can't find the container with id 17e7e4ff61820b9b345da99496308b570d1aa6b2bfb99177172fdcfd77286dde Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.019876 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ff644ad-bdd2-4306-918d-fe5a0bfbc964-catalog-content\") pod \"2ff644ad-bdd2-4306-918d-fe5a0bfbc964\" (UID: \"2ff644ad-bdd2-4306-918d-fe5a0bfbc964\") " Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.019920 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ff644ad-bdd2-4306-918d-fe5a0bfbc964-utilities\") pod \"2ff644ad-bdd2-4306-918d-fe5a0bfbc964\" (UID: \"2ff644ad-bdd2-4306-918d-fe5a0bfbc964\") " Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.019966 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7gkq\" (UniqueName: \"kubernetes.io/projected/29451954-9708-42c3-87aa-e28d06a5c640-kube-api-access-b7gkq\") pod \"29451954-9708-42c3-87aa-e28d06a5c640\" (UID: \"29451954-9708-42c3-87aa-e28d06a5c640\") " Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.020006 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6jrn\" (UniqueName: \"kubernetes.io/projected/2ff644ad-bdd2-4306-918d-fe5a0bfbc964-kube-api-access-m6jrn\") pod \"2ff644ad-bdd2-4306-918d-fe5a0bfbc964\" (UID: \"2ff644ad-bdd2-4306-918d-fe5a0bfbc964\") " Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.020043 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29451954-9708-42c3-87aa-e28d06a5c640-catalog-content\") pod \"29451954-9708-42c3-87aa-e28d06a5c640\" (UID: \"29451954-9708-42c3-87aa-e28d06a5c640\") " Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.020846 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29451954-9708-42c3-87aa-e28d06a5c640-utilities\") pod \"29451954-9708-42c3-87aa-e28d06a5c640\" (UID: \"29451954-9708-42c3-87aa-e28d06a5c640\") " Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.021111 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e633bf80-04ad-4770-abc5-5d453077543c-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.021129 4631 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.021139 4631 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-fnrc2\" (UniqueName: \"kubernetes.io/projected/dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a-kube-api-access-fnrc2\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.021206 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-snnrg\" (UniqueName: \"kubernetes.io/projected/e633bf80-04ad-4770-abc5-5d453077543c-kube-api-access-snnrg\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.021214 4631 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.021223 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e633bf80-04ad-4770-abc5-5d453077543c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.021388 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ff644ad-bdd2-4306-918d-fe5a0bfbc964-utilities" (OuterVolumeSpecName: "utilities") pod "2ff644ad-bdd2-4306-918d-fe5a0bfbc964" (UID: "2ff644ad-bdd2-4306-918d-fe5a0bfbc964"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.021726 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29451954-9708-42c3-87aa-e28d06a5c640-utilities" (OuterVolumeSpecName: "utilities") pod "29451954-9708-42c3-87aa-e28d06a5c640" (UID: "29451954-9708-42c3-87aa-e28d06a5c640"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.022989 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29451954-9708-42c3-87aa-e28d06a5c640-kube-api-access-b7gkq" (OuterVolumeSpecName: "kube-api-access-b7gkq") pod "29451954-9708-42c3-87aa-e28d06a5c640" (UID: "29451954-9708-42c3-87aa-e28d06a5c640"). InnerVolumeSpecName "kube-api-access-b7gkq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.024215 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ff644ad-bdd2-4306-918d-fe5a0bfbc964-kube-api-access-m6jrn" (OuterVolumeSpecName: "kube-api-access-m6jrn") pod "2ff644ad-bdd2-4306-918d-fe5a0bfbc964" (UID: "2ff644ad-bdd2-4306-918d-fe5a0bfbc964"). InnerVolumeSpecName "kube-api-access-m6jrn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.041839 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29451954-9708-42c3-87aa-e28d06a5c640-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "29451954-9708-42c3-87aa-e28d06a5c640" (UID: "29451954-9708-42c3-87aa-e28d06a5c640"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.122640 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7gkq\" (UniqueName: \"kubernetes.io/projected/29451954-9708-42c3-87aa-e28d06a5c640-kube-api-access-b7gkq\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.123164 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6jrn\" (UniqueName: \"kubernetes.io/projected/2ff644ad-bdd2-4306-918d-fe5a0bfbc964-kube-api-access-m6jrn\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.123182 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29451954-9708-42c3-87aa-e28d06a5c640-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.123194 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29451954-9708-42c3-87aa-e28d06a5c640-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.123209 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ff644ad-bdd2-4306-918d-fe5a0bfbc964-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.158505 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ff644ad-bdd2-4306-918d-fe5a0bfbc964-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2ff644ad-bdd2-4306-918d-fe5a0bfbc964" (UID: "2ff644ad-bdd2-4306-918d-fe5a0bfbc964"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.224587 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ff644ad-bdd2-4306-918d-fe5a0bfbc964-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.653259 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nx6gn" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.653341 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nx6gn" event={"ID":"29451954-9708-42c3-87aa-e28d06a5c640","Type":"ContainerDied","Data":"bebb6a658d63134aee4940514501c0dfebde56a72886f6168d3185325aa9356c"} Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.653515 4631 scope.go:117] "RemoveContainer" containerID="f5b443ba2e30f6566c7fa69c834a67662d42ac6e29b8726f82ce26c9e69d9b11" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.658269 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bmh96" event={"ID":"e633bf80-04ad-4770-abc5-5d453077543c","Type":"ContainerDied","Data":"fb1572ad85b74f5c351c1d8c1410f5380c11d1084cc3782588e45992dbee521b"} Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.658453 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bmh96" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.660765 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-54924" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.660811 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-54924" event={"ID":"2ff644ad-bdd2-4306-918d-fe5a0bfbc964","Type":"ContainerDied","Data":"7724f8fc774de1812a1a442c164c257dc5ce42c5fdaf17eed20f65fe2f466b2f"} Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.663150 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n9bhz" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.664624 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-lpl45" event={"ID":"68b9b122-03d1-41c7-8910-62826c1eedbb","Type":"ContainerStarted","Data":"13b32fee13d5bd1c052f8037bdb2443bba5ecd1a702f3a01afa4d78bc37e167a"} Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.664657 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-lpl45" event={"ID":"68b9b122-03d1-41c7-8910-62826c1eedbb","Type":"ContainerStarted","Data":"17e7e4ff61820b9b345da99496308b570d1aa6b2bfb99177172fdcfd77286dde"} Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.666111 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-lpl45" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.671068 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-lpl45" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.671956 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-d97wj" event={"ID":"dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a","Type":"ContainerDied","Data":"00d2a0be36f2c74451f3de6d12ca5cad2e80bb18fb3eae8e6d7602bdd402c15c"} Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.672032 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-d97wj" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.688774 4631 scope.go:117] "RemoveContainer" containerID="c710697965be352c27146a6fd20506de8ac49b6e1772fa907a84fba829d725c1" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.711055 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-lpl45" podStartSLOduration=1.7110349710000001 podStartE2EDuration="1.711034971s" podCreationTimestamp="2025-12-04 17:34:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:34:53.704981456 +0000 UTC m=+423.737223464" watchObservedRunningTime="2025-12-04 17:34:53.711034971 +0000 UTC m=+423.743276969" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.728592 4631 scope.go:117] "RemoveContainer" containerID="9847af89e7a30b4ca903117249fd363ad289b58e1a43c7667250a155dc427cc6" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.738076 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nx6gn"] Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.744446 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nx6gn"] Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.758179 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bmh96"] Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.762828 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bmh96"] Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.764482 4631 scope.go:117] "RemoveContainer" containerID="67a93955a8caa57944cee1cfde94a8f78ea5ef57934a0946566c57d489527adf" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.784061 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d97wj"] Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.789210 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d97wj"] Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.805405 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n9bhz"] Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.809776 4631 scope.go:117] "RemoveContainer" containerID="461fb2020d0e2fd89b333a84f8a71f48c69ec012a1d2e9fe2377cfd96e9043e0" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.812721 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-n9bhz"] Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.820470 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-54924"] Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.824716 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-54924"] Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.837545 4631 scope.go:117] "RemoveContainer" containerID="cb0add41048cb2f19877f93ebb7bc819da3d6d4391abc58bde21eea66cb1daf7" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.857581 4631 scope.go:117] "RemoveContainer" containerID="a996ce528f4f8cde7bb3cdcaceb578f8c4f917bb4eaba7f5c8ac04585d0a2c47" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 
17:34:53.884066 4631 scope.go:117] "RemoveContainer" containerID="5ad646934ff2ff03f29cb8d3cd1717852d14e6fe9455ac79fffdd0e2dbb683bf" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.901812 4631 scope.go:117] "RemoveContainer" containerID="634a43bc8db0d8833b2ebf3cc0ca18082041fb28a19e9d3b37ac33845abdf675" Dec 04 17:34:53 crc kubenswrapper[4631]: I1204 17:34:53.922802 4631 scope.go:117] "RemoveContainer" containerID="440fd9893a7c425a59523910df10ddb453c974398e8d56bfee3bbd2be62fa9f1" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.245346 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29451954-9708-42c3-87aa-e28d06a5c640" path="/var/lib/kubelet/pods/29451954-9708-42c3-87aa-e28d06a5c640/volumes" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.246304 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ff644ad-bdd2-4306-918d-fe5a0bfbc964" path="/var/lib/kubelet/pods/2ff644ad-bdd2-4306-918d-fe5a0bfbc964/volumes" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.247226 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="885a0356-3029-482f-b3b5-3caa01e19c62" path="/var/lib/kubelet/pods/885a0356-3029-482f-b3b5-3caa01e19c62/volumes" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.248965 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a" path="/var/lib/kubelet/pods/dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a/volumes" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.249734 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e633bf80-04ad-4770-abc5-5d453077543c" path="/var/lib/kubelet/pods/e633bf80-04ad-4770-abc5-5d453077543c/volumes" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.393155 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-n859d"] Dec 04 17:34:54 crc kubenswrapper[4631]: E1204 17:34:54.393814 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e633bf80-04ad-4770-abc5-5d453077543c" containerName="extract-utilities" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.393827 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="e633bf80-04ad-4770-abc5-5d453077543c" containerName="extract-utilities" Dec 04 17:34:54 crc kubenswrapper[4631]: E1204 17:34:54.393842 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29451954-9708-42c3-87aa-e28d06a5c640" containerName="extract-content" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.393848 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="29451954-9708-42c3-87aa-e28d06a5c640" containerName="extract-content" Dec 04 17:34:54 crc kubenswrapper[4631]: E1204 17:34:54.393858 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e633bf80-04ad-4770-abc5-5d453077543c" containerName="registry-server" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.393866 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="e633bf80-04ad-4770-abc5-5d453077543c" containerName="registry-server" Dec 04 17:34:54 crc kubenswrapper[4631]: E1204 17:34:54.393875 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a" containerName="marketplace-operator" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.393881 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a" containerName="marketplace-operator" Dec 04 17:34:54 crc kubenswrapper[4631]: 
E1204 17:34:54.393892 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ff644ad-bdd2-4306-918d-fe5a0bfbc964" containerName="registry-server" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.393899 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ff644ad-bdd2-4306-918d-fe5a0bfbc964" containerName="registry-server" Dec 04 17:34:54 crc kubenswrapper[4631]: E1204 17:34:54.393906 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="885a0356-3029-482f-b3b5-3caa01e19c62" containerName="registry-server" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.393912 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="885a0356-3029-482f-b3b5-3caa01e19c62" containerName="registry-server" Dec 04 17:34:54 crc kubenswrapper[4631]: E1204 17:34:54.393922 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="885a0356-3029-482f-b3b5-3caa01e19c62" containerName="extract-content" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.393927 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="885a0356-3029-482f-b3b5-3caa01e19c62" containerName="extract-content" Dec 04 17:34:54 crc kubenswrapper[4631]: E1204 17:34:54.393937 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ff644ad-bdd2-4306-918d-fe5a0bfbc964" containerName="extract-utilities" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.393945 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ff644ad-bdd2-4306-918d-fe5a0bfbc964" containerName="extract-utilities" Dec 04 17:34:54 crc kubenswrapper[4631]: E1204 17:34:54.393954 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="885a0356-3029-482f-b3b5-3caa01e19c62" containerName="extract-utilities" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.393960 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="885a0356-3029-482f-b3b5-3caa01e19c62" containerName="extract-utilities" Dec 04 17:34:54 crc kubenswrapper[4631]: E1204 17:34:54.393967 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ff644ad-bdd2-4306-918d-fe5a0bfbc964" containerName="extract-content" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.393973 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ff644ad-bdd2-4306-918d-fe5a0bfbc964" containerName="extract-content" Dec 04 17:34:54 crc kubenswrapper[4631]: E1204 17:34:54.393980 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e633bf80-04ad-4770-abc5-5d453077543c" containerName="extract-content" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.393985 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="e633bf80-04ad-4770-abc5-5d453077543c" containerName="extract-content" Dec 04 17:34:54 crc kubenswrapper[4631]: E1204 17:34:54.393993 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29451954-9708-42c3-87aa-e28d06a5c640" containerName="registry-server" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.393999 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="29451954-9708-42c3-87aa-e28d06a5c640" containerName="registry-server" Dec 04 17:34:54 crc kubenswrapper[4631]: E1204 17:34:54.394006 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29451954-9708-42c3-87aa-e28d06a5c640" containerName="extract-utilities" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.394012 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="29451954-9708-42c3-87aa-e28d06a5c640" containerName="extract-utilities" Dec 04 
17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.394093 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="29451954-9708-42c3-87aa-e28d06a5c640" containerName="registry-server" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.394104 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcf8ab5e-a0db-4005-ad40-4bf2d5068b4a" containerName="marketplace-operator" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.394115 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="e633bf80-04ad-4770-abc5-5d453077543c" containerName="registry-server" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.394121 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="885a0356-3029-482f-b3b5-3caa01e19c62" containerName="registry-server" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.394130 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ff644ad-bdd2-4306-918d-fe5a0bfbc964" containerName="registry-server" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.395445 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-n859d" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.397405 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.405277 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-n859d"] Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.547946 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d32387e-c5ff-4cf8-8383-38bf64325277-utilities\") pod \"certified-operators-n859d\" (UID: \"4d32387e-c5ff-4cf8-8383-38bf64325277\") " pod="openshift-marketplace/certified-operators-n859d" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.548938 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d32387e-c5ff-4cf8-8383-38bf64325277-catalog-content\") pod \"certified-operators-n859d\" (UID: \"4d32387e-c5ff-4cf8-8383-38bf64325277\") " pod="openshift-marketplace/certified-operators-n859d" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.548992 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djv76\" (UniqueName: \"kubernetes.io/projected/4d32387e-c5ff-4cf8-8383-38bf64325277-kube-api-access-djv76\") pod \"certified-operators-n859d\" (UID: \"4d32387e-c5ff-4cf8-8383-38bf64325277\") " pod="openshift-marketplace/certified-operators-n859d" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.650057 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d32387e-c5ff-4cf8-8383-38bf64325277-catalog-content\") pod \"certified-operators-n859d\" (UID: \"4d32387e-c5ff-4cf8-8383-38bf64325277\") " pod="openshift-marketplace/certified-operators-n859d" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.650119 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djv76\" (UniqueName: \"kubernetes.io/projected/4d32387e-c5ff-4cf8-8383-38bf64325277-kube-api-access-djv76\") pod \"certified-operators-n859d\" (UID: 
\"4d32387e-c5ff-4cf8-8383-38bf64325277\") " pod="openshift-marketplace/certified-operators-n859d" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.650193 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d32387e-c5ff-4cf8-8383-38bf64325277-utilities\") pod \"certified-operators-n859d\" (UID: \"4d32387e-c5ff-4cf8-8383-38bf64325277\") " pod="openshift-marketplace/certified-operators-n859d" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.650719 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d32387e-c5ff-4cf8-8383-38bf64325277-catalog-content\") pod \"certified-operators-n859d\" (UID: \"4d32387e-c5ff-4cf8-8383-38bf64325277\") " pod="openshift-marketplace/certified-operators-n859d" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.654574 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d32387e-c5ff-4cf8-8383-38bf64325277-utilities\") pod \"certified-operators-n859d\" (UID: \"4d32387e-c5ff-4cf8-8383-38bf64325277\") " pod="openshift-marketplace/certified-operators-n859d" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.676070 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djv76\" (UniqueName: \"kubernetes.io/projected/4d32387e-c5ff-4cf8-8383-38bf64325277-kube-api-access-djv76\") pod \"certified-operators-n859d\" (UID: \"4d32387e-c5ff-4cf8-8383-38bf64325277\") " pod="openshift-marketplace/certified-operators-n859d" Dec 04 17:34:54 crc kubenswrapper[4631]: I1204 17:34:54.755893 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-n859d" Dec 04 17:34:55 crc kubenswrapper[4631]: I1204 17:34:55.174410 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-n859d"] Dec 04 17:34:55 crc kubenswrapper[4631]: I1204 17:34:55.688099 4631 generic.go:334] "Generic (PLEG): container finished" podID="4d32387e-c5ff-4cf8-8383-38bf64325277" containerID="2b92f0a41aaf28ffc8a02ceb2b9ef573278478553f36300f5fa5ac40007b6f80" exitCode=0 Dec 04 17:34:55 crc kubenswrapper[4631]: I1204 17:34:55.690073 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n859d" event={"ID":"4d32387e-c5ff-4cf8-8383-38bf64325277","Type":"ContainerDied","Data":"2b92f0a41aaf28ffc8a02ceb2b9ef573278478553f36300f5fa5ac40007b6f80"} Dec 04 17:34:55 crc kubenswrapper[4631]: I1204 17:34:55.690103 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n859d" event={"ID":"4d32387e-c5ff-4cf8-8383-38bf64325277","Type":"ContainerStarted","Data":"50f1a1c2a84c35b67680383885e64c4945f5d72549b248973888c907ba30d3ae"} Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.202859 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tzprv"] Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.204344 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tzprv" Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.206448 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.212131 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tzprv"] Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.374850 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8x7m\" (UniqueName: \"kubernetes.io/projected/575a3ee8-f538-4c96-8067-564def2cc3ff-kube-api-access-b8x7m\") pod \"redhat-marketplace-tzprv\" (UID: \"575a3ee8-f538-4c96-8067-564def2cc3ff\") " pod="openshift-marketplace/redhat-marketplace-tzprv" Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.375011 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/575a3ee8-f538-4c96-8067-564def2cc3ff-utilities\") pod \"redhat-marketplace-tzprv\" (UID: \"575a3ee8-f538-4c96-8067-564def2cc3ff\") " pod="openshift-marketplace/redhat-marketplace-tzprv" Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.375171 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/575a3ee8-f538-4c96-8067-564def2cc3ff-catalog-content\") pod \"redhat-marketplace-tzprv\" (UID: \"575a3ee8-f538-4c96-8067-564def2cc3ff\") " pod="openshift-marketplace/redhat-marketplace-tzprv" Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.477100 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8x7m\" (UniqueName: \"kubernetes.io/projected/575a3ee8-f538-4c96-8067-564def2cc3ff-kube-api-access-b8x7m\") pod \"redhat-marketplace-tzprv\" (UID: \"575a3ee8-f538-4c96-8067-564def2cc3ff\") " pod="openshift-marketplace/redhat-marketplace-tzprv" Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.477549 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/575a3ee8-f538-4c96-8067-564def2cc3ff-utilities\") pod \"redhat-marketplace-tzprv\" (UID: \"575a3ee8-f538-4c96-8067-564def2cc3ff\") " pod="openshift-marketplace/redhat-marketplace-tzprv" Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.477656 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/575a3ee8-f538-4c96-8067-564def2cc3ff-catalog-content\") pod \"redhat-marketplace-tzprv\" (UID: \"575a3ee8-f538-4c96-8067-564def2cc3ff\") " pod="openshift-marketplace/redhat-marketplace-tzprv" Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.478107 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/575a3ee8-f538-4c96-8067-564def2cc3ff-catalog-content\") pod \"redhat-marketplace-tzprv\" (UID: \"575a3ee8-f538-4c96-8067-564def2cc3ff\") " pod="openshift-marketplace/redhat-marketplace-tzprv" Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.478162 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/575a3ee8-f538-4c96-8067-564def2cc3ff-utilities\") pod \"redhat-marketplace-tzprv\" (UID: 
\"575a3ee8-f538-4c96-8067-564def2cc3ff\") " pod="openshift-marketplace/redhat-marketplace-tzprv" Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.500063 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8x7m\" (UniqueName: \"kubernetes.io/projected/575a3ee8-f538-4c96-8067-564def2cc3ff-kube-api-access-b8x7m\") pod \"redhat-marketplace-tzprv\" (UID: \"575a3ee8-f538-4c96-8067-564def2cc3ff\") " pod="openshift-marketplace/redhat-marketplace-tzprv" Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.531512 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tzprv" Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.695557 4631 generic.go:334] "Generic (PLEG): container finished" podID="4d32387e-c5ff-4cf8-8383-38bf64325277" containerID="757efba221f6fff72c228d3085528b237a8666ee3e7f8f4c89840203f99cf1b8" exitCode=0 Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.695607 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n859d" event={"ID":"4d32387e-c5ff-4cf8-8383-38bf64325277","Type":"ContainerDied","Data":"757efba221f6fff72c228d3085528b237a8666ee3e7f8f4c89840203f99cf1b8"} Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.808330 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cfh7w"] Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.810106 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cfh7w" Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.812309 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cfh7w"] Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.814652 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.934580 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tzprv"] Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.984604 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e28d58eb-8c1a-4e76-87fc-aefb35295f30-catalog-content\") pod \"redhat-operators-cfh7w\" (UID: \"e28d58eb-8c1a-4e76-87fc-aefb35295f30\") " pod="openshift-marketplace/redhat-operators-cfh7w" Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.984679 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e28d58eb-8c1a-4e76-87fc-aefb35295f30-utilities\") pod \"redhat-operators-cfh7w\" (UID: \"e28d58eb-8c1a-4e76-87fc-aefb35295f30\") " pod="openshift-marketplace/redhat-operators-cfh7w" Dec 04 17:34:56 crc kubenswrapper[4631]: I1204 17:34:56.984724 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzmtq\" (UniqueName: \"kubernetes.io/projected/e28d58eb-8c1a-4e76-87fc-aefb35295f30-kube-api-access-hzmtq\") pod \"redhat-operators-cfh7w\" (UID: \"e28d58eb-8c1a-4e76-87fc-aefb35295f30\") " pod="openshift-marketplace/redhat-operators-cfh7w" Dec 04 17:34:57 crc kubenswrapper[4631]: I1204 17:34:57.088186 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/e28d58eb-8c1a-4e76-87fc-aefb35295f30-utilities\") pod \"redhat-operators-cfh7w\" (UID: \"e28d58eb-8c1a-4e76-87fc-aefb35295f30\") " pod="openshift-marketplace/redhat-operators-cfh7w" Dec 04 17:34:57 crc kubenswrapper[4631]: I1204 17:34:57.088260 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzmtq\" (UniqueName: \"kubernetes.io/projected/e28d58eb-8c1a-4e76-87fc-aefb35295f30-kube-api-access-hzmtq\") pod \"redhat-operators-cfh7w\" (UID: \"e28d58eb-8c1a-4e76-87fc-aefb35295f30\") " pod="openshift-marketplace/redhat-operators-cfh7w" Dec 04 17:34:57 crc kubenswrapper[4631]: I1204 17:34:57.088420 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e28d58eb-8c1a-4e76-87fc-aefb35295f30-catalog-content\") pod \"redhat-operators-cfh7w\" (UID: \"e28d58eb-8c1a-4e76-87fc-aefb35295f30\") " pod="openshift-marketplace/redhat-operators-cfh7w" Dec 04 17:34:57 crc kubenswrapper[4631]: I1204 17:34:57.091034 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e28d58eb-8c1a-4e76-87fc-aefb35295f30-utilities\") pod \"redhat-operators-cfh7w\" (UID: \"e28d58eb-8c1a-4e76-87fc-aefb35295f30\") " pod="openshift-marketplace/redhat-operators-cfh7w" Dec 04 17:34:57 crc kubenswrapper[4631]: I1204 17:34:57.091158 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e28d58eb-8c1a-4e76-87fc-aefb35295f30-catalog-content\") pod \"redhat-operators-cfh7w\" (UID: \"e28d58eb-8c1a-4e76-87fc-aefb35295f30\") " pod="openshift-marketplace/redhat-operators-cfh7w" Dec 04 17:34:57 crc kubenswrapper[4631]: I1204 17:34:57.110472 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzmtq\" (UniqueName: \"kubernetes.io/projected/e28d58eb-8c1a-4e76-87fc-aefb35295f30-kube-api-access-hzmtq\") pod \"redhat-operators-cfh7w\" (UID: \"e28d58eb-8c1a-4e76-87fc-aefb35295f30\") " pod="openshift-marketplace/redhat-operators-cfh7w" Dec 04 17:34:57 crc kubenswrapper[4631]: I1204 17:34:57.138809 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cfh7w" Dec 04 17:34:57 crc kubenswrapper[4631]: I1204 17:34:57.350341 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cfh7w"] Dec 04 17:34:57 crc kubenswrapper[4631]: W1204 17:34:57.363655 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode28d58eb_8c1a_4e76_87fc_aefb35295f30.slice/crio-f5797accb03137ca471bd2767521904ff56996d638f44170af80b1ecbad964f5 WatchSource:0}: Error finding container f5797accb03137ca471bd2767521904ff56996d638f44170af80b1ecbad964f5: Status 404 returned error can't find the container with id f5797accb03137ca471bd2767521904ff56996d638f44170af80b1ecbad964f5 Dec 04 17:34:57 crc kubenswrapper[4631]: I1204 17:34:57.701894 4631 generic.go:334] "Generic (PLEG): container finished" podID="e28d58eb-8c1a-4e76-87fc-aefb35295f30" containerID="ce5df8a70e01a821557a846d6ae29dcea96f9959b5f9b2dee3180b5ea4c5c1cc" exitCode=0 Dec 04 17:34:57 crc kubenswrapper[4631]: I1204 17:34:57.701990 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cfh7w" event={"ID":"e28d58eb-8c1a-4e76-87fc-aefb35295f30","Type":"ContainerDied","Data":"ce5df8a70e01a821557a846d6ae29dcea96f9959b5f9b2dee3180b5ea4c5c1cc"} Dec 04 17:34:57 crc kubenswrapper[4631]: I1204 17:34:57.702033 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cfh7w" event={"ID":"e28d58eb-8c1a-4e76-87fc-aefb35295f30","Type":"ContainerStarted","Data":"f5797accb03137ca471bd2767521904ff56996d638f44170af80b1ecbad964f5"} Dec 04 17:34:57 crc kubenswrapper[4631]: I1204 17:34:57.704804 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n859d" event={"ID":"4d32387e-c5ff-4cf8-8383-38bf64325277","Type":"ContainerStarted","Data":"a998fd61eca91ce3044bfd104f6a00ec156f5dedc1b2d20fb0590243b5f90f3e"} Dec 04 17:34:57 crc kubenswrapper[4631]: I1204 17:34:57.720880 4631 generic.go:334] "Generic (PLEG): container finished" podID="575a3ee8-f538-4c96-8067-564def2cc3ff" containerID="54961479b6d121260589e14aee7bd1a880bb9045e9235cc539ce6d473191b817" exitCode=0 Dec 04 17:34:57 crc kubenswrapper[4631]: I1204 17:34:57.720937 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tzprv" event={"ID":"575a3ee8-f538-4c96-8067-564def2cc3ff","Type":"ContainerDied","Data":"54961479b6d121260589e14aee7bd1a880bb9045e9235cc539ce6d473191b817"} Dec 04 17:34:57 crc kubenswrapper[4631]: I1204 17:34:57.720977 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tzprv" event={"ID":"575a3ee8-f538-4c96-8067-564def2cc3ff","Type":"ContainerStarted","Data":"a529ab0d5684f955bf29cdecbcd092992a630af8d1fd3b9cc820f8315d83ce83"} Dec 04 17:34:57 crc kubenswrapper[4631]: I1204 17:34:57.746998 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-n859d" podStartSLOduration=2.336889713 podStartE2EDuration="3.746980337s" podCreationTimestamp="2025-12-04 17:34:54 +0000 UTC" firstStartedPulling="2025-12-04 17:34:55.692089957 +0000 UTC m=+425.724331955" lastFinishedPulling="2025-12-04 17:34:57.102180571 +0000 UTC m=+427.134422579" observedRunningTime="2025-12-04 17:34:57.745454703 +0000 UTC m=+427.777696701" watchObservedRunningTime="2025-12-04 17:34:57.746980337 +0000 UTC m=+427.779222335" Dec 04 17:34:58 crc 
kubenswrapper[4631]: I1204 17:34:58.598679 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-db4qf"] Dec 04 17:34:58 crc kubenswrapper[4631]: I1204 17:34:58.600447 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-db4qf" Dec 04 17:34:58 crc kubenswrapper[4631]: I1204 17:34:58.604522 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 04 17:34:58 crc kubenswrapper[4631]: I1204 17:34:58.616552 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-db4qf"] Dec 04 17:34:58 crc kubenswrapper[4631]: I1204 17:34:58.711008 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rpb9\" (UniqueName: \"kubernetes.io/projected/5267c3bf-4068-4178-9e3e-9a24e1c11a5e-kube-api-access-4rpb9\") pod \"community-operators-db4qf\" (UID: \"5267c3bf-4068-4178-9e3e-9a24e1c11a5e\") " pod="openshift-marketplace/community-operators-db4qf" Dec 04 17:34:58 crc kubenswrapper[4631]: I1204 17:34:58.711072 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5267c3bf-4068-4178-9e3e-9a24e1c11a5e-utilities\") pod \"community-operators-db4qf\" (UID: \"5267c3bf-4068-4178-9e3e-9a24e1c11a5e\") " pod="openshift-marketplace/community-operators-db4qf" Dec 04 17:34:58 crc kubenswrapper[4631]: I1204 17:34:58.711152 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5267c3bf-4068-4178-9e3e-9a24e1c11a5e-catalog-content\") pod \"community-operators-db4qf\" (UID: \"5267c3bf-4068-4178-9e3e-9a24e1c11a5e\") " pod="openshift-marketplace/community-operators-db4qf" Dec 04 17:34:58 crc kubenswrapper[4631]: I1204 17:34:58.729937 4631 generic.go:334] "Generic (PLEG): container finished" podID="575a3ee8-f538-4c96-8067-564def2cc3ff" containerID="36c5bfaff2342ffde8fdb77db693524a88ae9e131e964de91372689342882f0c" exitCode=0 Dec 04 17:34:58 crc kubenswrapper[4631]: I1204 17:34:58.730027 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tzprv" event={"ID":"575a3ee8-f538-4c96-8067-564def2cc3ff","Type":"ContainerDied","Data":"36c5bfaff2342ffde8fdb77db693524a88ae9e131e964de91372689342882f0c"} Dec 04 17:34:58 crc kubenswrapper[4631]: I1204 17:34:58.734235 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cfh7w" event={"ID":"e28d58eb-8c1a-4e76-87fc-aefb35295f30","Type":"ContainerStarted","Data":"7e22e76922e168a343f9e842ed8cc765f952b4693cacf4388b108ce83ac781a0"} Dec 04 17:34:58 crc kubenswrapper[4631]: I1204 17:34:58.812413 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5267c3bf-4068-4178-9e3e-9a24e1c11a5e-utilities\") pod \"community-operators-db4qf\" (UID: \"5267c3bf-4068-4178-9e3e-9a24e1c11a5e\") " pod="openshift-marketplace/community-operators-db4qf" Dec 04 17:34:58 crc kubenswrapper[4631]: I1204 17:34:58.812893 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5267c3bf-4068-4178-9e3e-9a24e1c11a5e-catalog-content\") pod \"community-operators-db4qf\" (UID: 
\"5267c3bf-4068-4178-9e3e-9a24e1c11a5e\") " pod="openshift-marketplace/community-operators-db4qf" Dec 04 17:34:58 crc kubenswrapper[4631]: I1204 17:34:58.813137 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rpb9\" (UniqueName: \"kubernetes.io/projected/5267c3bf-4068-4178-9e3e-9a24e1c11a5e-kube-api-access-4rpb9\") pod \"community-operators-db4qf\" (UID: \"5267c3bf-4068-4178-9e3e-9a24e1c11a5e\") " pod="openshift-marketplace/community-operators-db4qf" Dec 04 17:34:58 crc kubenswrapper[4631]: I1204 17:34:58.813638 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5267c3bf-4068-4178-9e3e-9a24e1c11a5e-catalog-content\") pod \"community-operators-db4qf\" (UID: \"5267c3bf-4068-4178-9e3e-9a24e1c11a5e\") " pod="openshift-marketplace/community-operators-db4qf" Dec 04 17:34:58 crc kubenswrapper[4631]: I1204 17:34:58.814036 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5267c3bf-4068-4178-9e3e-9a24e1c11a5e-utilities\") pod \"community-operators-db4qf\" (UID: \"5267c3bf-4068-4178-9e3e-9a24e1c11a5e\") " pod="openshift-marketplace/community-operators-db4qf" Dec 04 17:34:58 crc kubenswrapper[4631]: I1204 17:34:58.838400 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rpb9\" (UniqueName: \"kubernetes.io/projected/5267c3bf-4068-4178-9e3e-9a24e1c11a5e-kube-api-access-4rpb9\") pod \"community-operators-db4qf\" (UID: \"5267c3bf-4068-4178-9e3e-9a24e1c11a5e\") " pod="openshift-marketplace/community-operators-db4qf" Dec 04 17:34:58 crc kubenswrapper[4631]: I1204 17:34:58.919875 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-db4qf" Dec 04 17:34:59 crc kubenswrapper[4631]: I1204 17:34:59.368642 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-db4qf"] Dec 04 17:34:59 crc kubenswrapper[4631]: W1204 17:34:59.376738 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5267c3bf_4068_4178_9e3e_9a24e1c11a5e.slice/crio-83af5e31c32bfbfac9564d6c542b2c75c3f9fddfc928cef6831a69caad97e62a WatchSource:0}: Error finding container 83af5e31c32bfbfac9564d6c542b2c75c3f9fddfc928cef6831a69caad97e62a: Status 404 returned error can't find the container with id 83af5e31c32bfbfac9564d6c542b2c75c3f9fddfc928cef6831a69caad97e62a Dec 04 17:34:59 crc kubenswrapper[4631]: I1204 17:34:59.743853 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tzprv" event={"ID":"575a3ee8-f538-4c96-8067-564def2cc3ff","Type":"ContainerStarted","Data":"7877f577e8e11a5fa3c98ba0da62c8814eff59132d7c9eee08ce4c41d95c51aa"} Dec 04 17:34:59 crc kubenswrapper[4631]: I1204 17:34:59.747325 4631 generic.go:334] "Generic (PLEG): container finished" podID="e28d58eb-8c1a-4e76-87fc-aefb35295f30" containerID="7e22e76922e168a343f9e842ed8cc765f952b4693cacf4388b108ce83ac781a0" exitCode=0 Dec 04 17:34:59 crc kubenswrapper[4631]: I1204 17:34:59.747410 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cfh7w" event={"ID":"e28d58eb-8c1a-4e76-87fc-aefb35295f30","Type":"ContainerDied","Data":"7e22e76922e168a343f9e842ed8cc765f952b4693cacf4388b108ce83ac781a0"} Dec 04 17:34:59 crc kubenswrapper[4631]: I1204 17:34:59.750623 4631 generic.go:334] "Generic (PLEG): container finished" podID="5267c3bf-4068-4178-9e3e-9a24e1c11a5e" containerID="a8ba5f267e01801b47af9c94fbe9cbd1043eb6e8a4391d06826b1754856870e5" exitCode=0 Dec 04 17:34:59 crc kubenswrapper[4631]: I1204 17:34:59.750658 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-db4qf" event={"ID":"5267c3bf-4068-4178-9e3e-9a24e1c11a5e","Type":"ContainerDied","Data":"a8ba5f267e01801b47af9c94fbe9cbd1043eb6e8a4391d06826b1754856870e5"} Dec 04 17:34:59 crc kubenswrapper[4631]: I1204 17:34:59.750679 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-db4qf" event={"ID":"5267c3bf-4068-4178-9e3e-9a24e1c11a5e","Type":"ContainerStarted","Data":"83af5e31c32bfbfac9564d6c542b2c75c3f9fddfc928cef6831a69caad97e62a"} Dec 04 17:34:59 crc kubenswrapper[4631]: I1204 17:34:59.776749 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tzprv" podStartSLOduration=2.336625447 podStartE2EDuration="3.776732091s" podCreationTimestamp="2025-12-04 17:34:56 +0000 UTC" firstStartedPulling="2025-12-04 17:34:57.731711845 +0000 UTC m=+427.763953843" lastFinishedPulling="2025-12-04 17:34:59.171818489 +0000 UTC m=+429.204060487" observedRunningTime="2025-12-04 17:34:59.775592228 +0000 UTC m=+429.807834236" watchObservedRunningTime="2025-12-04 17:34:59.776732091 +0000 UTC m=+429.808974079" Dec 04 17:35:00 crc kubenswrapper[4631]: I1204 17:35:00.767586 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cfh7w" event={"ID":"e28d58eb-8c1a-4e76-87fc-aefb35295f30","Type":"ContainerStarted","Data":"1c7eabaca71caabafc69bcd2b635b33f05f07feeadca777687c0b0bfb16df4cd"} Dec 04 
17:35:00 crc kubenswrapper[4631]: I1204 17:35:00.769689 4631 generic.go:334] "Generic (PLEG): container finished" podID="5267c3bf-4068-4178-9e3e-9a24e1c11a5e" containerID="9bca50fa30e9ba5050f87e955c0902129aa1dba6f85dec146cc6f5df5c54a38b" exitCode=0 Dec 04 17:35:00 crc kubenswrapper[4631]: I1204 17:35:00.769814 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-db4qf" event={"ID":"5267c3bf-4068-4178-9e3e-9a24e1c11a5e","Type":"ContainerDied","Data":"9bca50fa30e9ba5050f87e955c0902129aa1dba6f85dec146cc6f5df5c54a38b"} Dec 04 17:35:00 crc kubenswrapper[4631]: I1204 17:35:00.823677 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cfh7w" podStartSLOduration=2.399017571 podStartE2EDuration="4.823653813s" podCreationTimestamp="2025-12-04 17:34:56 +0000 UTC" firstStartedPulling="2025-12-04 17:34:57.703953353 +0000 UTC m=+427.736195351" lastFinishedPulling="2025-12-04 17:35:00.128589595 +0000 UTC m=+430.160831593" observedRunningTime="2025-12-04 17:35:00.816666901 +0000 UTC m=+430.848908899" watchObservedRunningTime="2025-12-04 17:35:00.823653813 +0000 UTC m=+430.855895811" Dec 04 17:35:01 crc kubenswrapper[4631]: I1204 17:35:01.777068 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-db4qf" event={"ID":"5267c3bf-4068-4178-9e3e-9a24e1c11a5e","Type":"ContainerStarted","Data":"d56764ec7d700d49b12811d22f46af46527e02444a9e40be867cd3a7a834880a"} Dec 04 17:35:01 crc kubenswrapper[4631]: I1204 17:35:01.808036 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-db4qf" podStartSLOduration=2.411832496 podStartE2EDuration="3.808014348s" podCreationTimestamp="2025-12-04 17:34:58 +0000 UTC" firstStartedPulling="2025-12-04 17:34:59.751628285 +0000 UTC m=+429.783870283" lastFinishedPulling="2025-12-04 17:35:01.147810137 +0000 UTC m=+431.180052135" observedRunningTime="2025-12-04 17:35:01.806795683 +0000 UTC m=+431.839037681" watchObservedRunningTime="2025-12-04 17:35:01.808014348 +0000 UTC m=+431.840256346" Dec 04 17:35:03 crc kubenswrapper[4631]: I1204 17:35:03.528108 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-rrg2v" Dec 04 17:35:03 crc kubenswrapper[4631]: I1204 17:35:03.597557 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m8hln"] Dec 04 17:35:04 crc kubenswrapper[4631]: I1204 17:35:04.757587 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-n859d" Dec 04 17:35:04 crc kubenswrapper[4631]: I1204 17:35:04.759809 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-n859d" Dec 04 17:35:04 crc kubenswrapper[4631]: I1204 17:35:04.804728 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-n859d" Dec 04 17:35:05 crc kubenswrapper[4631]: I1204 17:35:05.847798 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-n859d" Dec 04 17:35:06 crc kubenswrapper[4631]: I1204 17:35:06.023466 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 17:35:06 crc kubenswrapper[4631]: I1204 17:35:06.023570 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 17:35:06 crc kubenswrapper[4631]: I1204 17:35:06.023645 4631 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 17:35:06 crc kubenswrapper[4631]: I1204 17:35:06.024452 4631 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"609d239612b670bcf642d521340a1cbeabb8e40268181c9747fe492989b6287c"} pod="openshift-machine-config-operator/machine-config-daemon-q27wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 17:35:06 crc kubenswrapper[4631]: I1204 17:35:06.024562 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" containerID="cri-o://609d239612b670bcf642d521340a1cbeabb8e40268181c9747fe492989b6287c" gracePeriod=600 Dec 04 17:35:06 crc kubenswrapper[4631]: I1204 17:35:06.531672 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tzprv" Dec 04 17:35:06 crc kubenswrapper[4631]: I1204 17:35:06.532706 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tzprv" Dec 04 17:35:06 crc kubenswrapper[4631]: I1204 17:35:06.598389 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tzprv" Dec 04 17:35:06 crc kubenswrapper[4631]: I1204 17:35:06.804283 4631 generic.go:334] "Generic (PLEG): container finished" podID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerID="609d239612b670bcf642d521340a1cbeabb8e40268181c9747fe492989b6287c" exitCode=0 Dec 04 17:35:06 crc kubenswrapper[4631]: I1204 17:35:06.804403 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerDied","Data":"609d239612b670bcf642d521340a1cbeabb8e40268181c9747fe492989b6287c"} Dec 04 17:35:06 crc kubenswrapper[4631]: I1204 17:35:06.805307 4631 scope.go:117] "RemoveContainer" containerID="8dec29fc3df32aa42ed251ff7a05ea7d47e82d606779ed3a1f7a662c5a9ea6cd" Dec 04 17:35:06 crc kubenswrapper[4631]: I1204 17:35:06.849496 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tzprv" Dec 04 17:35:07 crc kubenswrapper[4631]: I1204 17:35:07.139747 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cfh7w" Dec 04 17:35:07 crc kubenswrapper[4631]: I1204 17:35:07.139817 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cfh7w" Dec 04 17:35:07 crc kubenswrapper[4631]: I1204 17:35:07.200111 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openshift-marketplace/redhat-operators-cfh7w" Dec 04 17:35:07 crc kubenswrapper[4631]: I1204 17:35:07.820554 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerStarted","Data":"3c93d30ef7a2bb653c0a583cb8f4fec3c259c54f861dc7199efed5370de610a0"} Dec 04 17:35:07 crc kubenswrapper[4631]: I1204 17:35:07.869669 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cfh7w" Dec 04 17:35:08 crc kubenswrapper[4631]: I1204 17:35:08.921001 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-db4qf" Dec 04 17:35:08 crc kubenswrapper[4631]: I1204 17:35:08.921572 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-db4qf" Dec 04 17:35:08 crc kubenswrapper[4631]: I1204 17:35:08.977034 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-db4qf" Dec 04 17:35:09 crc kubenswrapper[4631]: I1204 17:35:09.886608 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-db4qf" Dec 04 17:35:28 crc kubenswrapper[4631]: I1204 17:35:28.666235 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" podUID="52770a8d-d215-4fa8-8469-95a315e44850" containerName="registry" containerID="cri-o://0096b18d52a63440b7ba79eb2e870703ec2d2cacb727666a50405b0362a80bdf" gracePeriod=30 Dec 04 17:35:28 crc kubenswrapper[4631]: I1204 17:35:28.953659 4631 generic.go:334] "Generic (PLEG): container finished" podID="52770a8d-d215-4fa8-8469-95a315e44850" containerID="0096b18d52a63440b7ba79eb2e870703ec2d2cacb727666a50405b0362a80bdf" exitCode=0 Dec 04 17:35:28 crc kubenswrapper[4631]: I1204 17:35:28.953721 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" event={"ID":"52770a8d-d215-4fa8-8469-95a315e44850","Type":"ContainerDied","Data":"0096b18d52a63440b7ba79eb2e870703ec2d2cacb727666a50405b0362a80bdf"} Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.110657 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.183157 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/52770a8d-d215-4fa8-8469-95a315e44850-bound-sa-token\") pod \"52770a8d-d215-4fa8-8469-95a315e44850\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.183463 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/52770a8d-d215-4fa8-8469-95a315e44850-trusted-ca\") pod \"52770a8d-d215-4fa8-8469-95a315e44850\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.183666 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"52770a8d-d215-4fa8-8469-95a315e44850\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.183703 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/52770a8d-d215-4fa8-8469-95a315e44850-registry-certificates\") pod \"52770a8d-d215-4fa8-8469-95a315e44850\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.183733 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jdbcd\" (UniqueName: \"kubernetes.io/projected/52770a8d-d215-4fa8-8469-95a315e44850-kube-api-access-jdbcd\") pod \"52770a8d-d215-4fa8-8469-95a315e44850\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.183771 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/52770a8d-d215-4fa8-8469-95a315e44850-ca-trust-extracted\") pod \"52770a8d-d215-4fa8-8469-95a315e44850\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.183824 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/52770a8d-d215-4fa8-8469-95a315e44850-installation-pull-secrets\") pod \"52770a8d-d215-4fa8-8469-95a315e44850\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.183886 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/52770a8d-d215-4fa8-8469-95a315e44850-registry-tls\") pod \"52770a8d-d215-4fa8-8469-95a315e44850\" (UID: \"52770a8d-d215-4fa8-8469-95a315e44850\") " Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.184328 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52770a8d-d215-4fa8-8469-95a315e44850-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "52770a8d-d215-4fa8-8469-95a315e44850" (UID: "52770a8d-d215-4fa8-8469-95a315e44850"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.186327 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52770a8d-d215-4fa8-8469-95a315e44850-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "52770a8d-d215-4fa8-8469-95a315e44850" (UID: "52770a8d-d215-4fa8-8469-95a315e44850"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.191077 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52770a8d-d215-4fa8-8469-95a315e44850-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "52770a8d-d215-4fa8-8469-95a315e44850" (UID: "52770a8d-d215-4fa8-8469-95a315e44850"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.191666 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52770a8d-d215-4fa8-8469-95a315e44850-kube-api-access-jdbcd" (OuterVolumeSpecName: "kube-api-access-jdbcd") pod "52770a8d-d215-4fa8-8469-95a315e44850" (UID: "52770a8d-d215-4fa8-8469-95a315e44850"). InnerVolumeSpecName "kube-api-access-jdbcd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.194873 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52770a8d-d215-4fa8-8469-95a315e44850-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "52770a8d-d215-4fa8-8469-95a315e44850" (UID: "52770a8d-d215-4fa8-8469-95a315e44850"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.195649 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52770a8d-d215-4fa8-8469-95a315e44850-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "52770a8d-d215-4fa8-8469-95a315e44850" (UID: "52770a8d-d215-4fa8-8469-95a315e44850"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.198839 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "52770a8d-d215-4fa8-8469-95a315e44850" (UID: "52770a8d-d215-4fa8-8469-95a315e44850"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.209734 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52770a8d-d215-4fa8-8469-95a315e44850-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "52770a8d-d215-4fa8-8469-95a315e44850" (UID: "52770a8d-d215-4fa8-8469-95a315e44850"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.285781 4631 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/52770a8d-d215-4fa8-8469-95a315e44850-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.286217 4631 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/52770a8d-d215-4fa8-8469-95a315e44850-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.286312 4631 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/52770a8d-d215-4fa8-8469-95a315e44850-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.286422 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jdbcd\" (UniqueName: \"kubernetes.io/projected/52770a8d-d215-4fa8-8469-95a315e44850-kube-api-access-jdbcd\") on node \"crc\" DevicePath \"\"" Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.286514 4631 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/52770a8d-d215-4fa8-8469-95a315e44850-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.286613 4631 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/52770a8d-d215-4fa8-8469-95a315e44850-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.286677 4631 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/52770a8d-d215-4fa8-8469-95a315e44850-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.962443 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.962316 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-m8hln" event={"ID":"52770a8d-d215-4fa8-8469-95a315e44850","Type":"ContainerDied","Data":"25a06f8489dc0101d6da727452f3797101e1ea34a5811233e19b87887117adbd"} Dec 04 17:35:29 crc kubenswrapper[4631]: I1204 17:35:29.967085 4631 scope.go:117] "RemoveContainer" containerID="0096b18d52a63440b7ba79eb2e870703ec2d2cacb727666a50405b0362a80bdf" Dec 04 17:35:30 crc kubenswrapper[4631]: I1204 17:35:30.007994 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m8hln"] Dec 04 17:35:30 crc kubenswrapper[4631]: I1204 17:35:30.022575 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m8hln"] Dec 04 17:35:30 crc kubenswrapper[4631]: I1204 17:35:30.253828 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52770a8d-d215-4fa8-8469-95a315e44850" path="/var/lib/kubelet/pods/52770a8d-d215-4fa8-8469-95a315e44850/volumes" Dec 04 17:37:36 crc kubenswrapper[4631]: I1204 17:37:36.022687 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 17:37:36 crc kubenswrapper[4631]: I1204 17:37:36.023558 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 17:38:06 crc kubenswrapper[4631]: I1204 17:38:06.023126 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 17:38:06 crc kubenswrapper[4631]: I1204 17:38:06.025547 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 17:38:36 crc kubenswrapper[4631]: I1204 17:38:36.023111 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 17:38:36 crc kubenswrapper[4631]: I1204 17:38:36.024000 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 17:38:36 crc kubenswrapper[4631]: I1204 17:38:36.024065 4631 kubelet.go:2542] "SyncLoop 
(probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 17:38:36 crc kubenswrapper[4631]: I1204 17:38:36.024896 4631 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3c93d30ef7a2bb653c0a583cb8f4fec3c259c54f861dc7199efed5370de610a0"} pod="openshift-machine-config-operator/machine-config-daemon-q27wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 17:38:36 crc kubenswrapper[4631]: I1204 17:38:36.024991 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" containerID="cri-o://3c93d30ef7a2bb653c0a583cb8f4fec3c259c54f861dc7199efed5370de610a0" gracePeriod=600 Dec 04 17:38:36 crc kubenswrapper[4631]: I1204 17:38:36.226118 4631 generic.go:334] "Generic (PLEG): container finished" podID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerID="3c93d30ef7a2bb653c0a583cb8f4fec3c259c54f861dc7199efed5370de610a0" exitCode=0 Dec 04 17:38:36 crc kubenswrapper[4631]: I1204 17:38:36.226178 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerDied","Data":"3c93d30ef7a2bb653c0a583cb8f4fec3c259c54f861dc7199efed5370de610a0"} Dec 04 17:38:36 crc kubenswrapper[4631]: I1204 17:38:36.226242 4631 scope.go:117] "RemoveContainer" containerID="609d239612b670bcf642d521340a1cbeabb8e40268181c9747fe492989b6287c" Dec 04 17:38:37 crc kubenswrapper[4631]: I1204 17:38:37.235460 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerStarted","Data":"2f45cdb997b468d85204e4185fae04daffcd45b83032693f3595260f7ec666b6"} Dec 04 17:38:50 crc kubenswrapper[4631]: I1204 17:38:50.576917 4631 scope.go:117] "RemoveContainer" containerID="10eae845693d459790c8476b105982ae870ae89a3d6d7379ac06d36483738b8a" Dec 04 17:40:36 crc kubenswrapper[4631]: I1204 17:40:36.023419 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 17:40:36 crc kubenswrapper[4631]: I1204 17:40:36.024251 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 17:40:56 crc kubenswrapper[4631]: I1204 17:40:56.906077 4631 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 04 17:41:06 crc kubenswrapper[4631]: I1204 17:41:06.023587 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 17:41:06 crc 
kubenswrapper[4631]: I1204 17:41:06.024502 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 17:41:36 crc kubenswrapper[4631]: I1204 17:41:36.023312 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 17:41:36 crc kubenswrapper[4631]: I1204 17:41:36.024580 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 17:41:36 crc kubenswrapper[4631]: I1204 17:41:36.024650 4631 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 17:41:36 crc kubenswrapper[4631]: I1204 17:41:36.025654 4631 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2f45cdb997b468d85204e4185fae04daffcd45b83032693f3595260f7ec666b6"} pod="openshift-machine-config-operator/machine-config-daemon-q27wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 17:41:36 crc kubenswrapper[4631]: I1204 17:41:36.025742 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" containerID="cri-o://2f45cdb997b468d85204e4185fae04daffcd45b83032693f3595260f7ec666b6" gracePeriod=600 Dec 04 17:41:36 crc kubenswrapper[4631]: I1204 17:41:36.989466 4631 generic.go:334] "Generic (PLEG): container finished" podID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerID="2f45cdb997b468d85204e4185fae04daffcd45b83032693f3595260f7ec666b6" exitCode=0 Dec 04 17:41:36 crc kubenswrapper[4631]: I1204 17:41:36.989538 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerDied","Data":"2f45cdb997b468d85204e4185fae04daffcd45b83032693f3595260f7ec666b6"} Dec 04 17:41:36 crc kubenswrapper[4631]: I1204 17:41:36.990070 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerStarted","Data":"34035c448316dbd54a5149bba9f1bb4ce2bc406518cc3e31ea4f3aeb08daaf2b"} Dec 04 17:41:36 crc kubenswrapper[4631]: I1204 17:41:36.990110 4631 scope.go:117] "RemoveContainer" containerID="3c93d30ef7a2bb653c0a583cb8f4fec3c259c54f861dc7199efed5370de610a0" Dec 04 17:42:36 crc kubenswrapper[4631]: I1204 17:42:36.863025 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-5vb4n"] Dec 04 17:42:36 crc kubenswrapper[4631]: E1204 17:42:36.864044 4631 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="52770a8d-d215-4fa8-8469-95a315e44850" containerName="registry" Dec 04 17:42:36 crc kubenswrapper[4631]: I1204 17:42:36.864056 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="52770a8d-d215-4fa8-8469-95a315e44850" containerName="registry" Dec 04 17:42:36 crc kubenswrapper[4631]: I1204 17:42:36.864140 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="52770a8d-d215-4fa8-8469-95a315e44850" containerName="registry" Dec 04 17:42:36 crc kubenswrapper[4631]: I1204 17:42:36.864536 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-5vb4n" Dec 04 17:42:36 crc kubenswrapper[4631]: I1204 17:42:36.867707 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 04 17:42:36 crc kubenswrapper[4631]: I1204 17:42:36.867729 4631 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-85x5l" Dec 04 17:42:36 crc kubenswrapper[4631]: I1204 17:42:36.867773 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 04 17:42:36 crc kubenswrapper[4631]: I1204 17:42:36.884937 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-6djk2"] Dec 04 17:42:36 crc kubenswrapper[4631]: I1204 17:42:36.885646 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-6djk2" Dec 04 17:42:36 crc kubenswrapper[4631]: I1204 17:42:36.894911 4631 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-sxz8n" Dec 04 17:42:36 crc kubenswrapper[4631]: I1204 17:42:36.899959 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-tffkj"] Dec 04 17:42:36 crc kubenswrapper[4631]: I1204 17:42:36.900753 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-tffkj" Dec 04 17:42:36 crc kubenswrapper[4631]: I1204 17:42:36.904143 4631 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-8tq24" Dec 04 17:42:36 crc kubenswrapper[4631]: I1204 17:42:36.912264 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-6djk2"] Dec 04 17:42:36 crc kubenswrapper[4631]: I1204 17:42:36.925967 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-5vb4n"] Dec 04 17:42:36 crc kubenswrapper[4631]: I1204 17:42:36.935876 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-tffkj"] Dec 04 17:42:36 crc kubenswrapper[4631]: I1204 17:42:36.960639 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdnwk\" (UniqueName: \"kubernetes.io/projected/5a3b7e1e-41ff-4029-b2cd-6dc6be40ae3d-kube-api-access-wdnwk\") pod \"cert-manager-cainjector-7f985d654d-5vb4n\" (UID: \"5a3b7e1e-41ff-4029-b2cd-6dc6be40ae3d\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-5vb4n" Dec 04 17:42:37 crc kubenswrapper[4631]: I1204 17:42:37.061690 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q474j\" (UniqueName: \"kubernetes.io/projected/ea64b959-aecd-46e1-b2a4-cde17cc753d8-kube-api-access-q474j\") pod \"cert-manager-5b446d88c5-6djk2\" (UID: \"ea64b959-aecd-46e1-b2a4-cde17cc753d8\") " pod="cert-manager/cert-manager-5b446d88c5-6djk2" Dec 04 17:42:37 crc kubenswrapper[4631]: I1204 17:42:37.061779 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkbhl\" (UniqueName: \"kubernetes.io/projected/d558fa33-5875-4eb1-80ec-2f5726659b7e-kube-api-access-dkbhl\") pod \"cert-manager-webhook-5655c58dd6-tffkj\" (UID: \"d558fa33-5875-4eb1-80ec-2f5726659b7e\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-tffkj" Dec 04 17:42:37 crc kubenswrapper[4631]: I1204 17:42:37.061804 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdnwk\" (UniqueName: \"kubernetes.io/projected/5a3b7e1e-41ff-4029-b2cd-6dc6be40ae3d-kube-api-access-wdnwk\") pod \"cert-manager-cainjector-7f985d654d-5vb4n\" (UID: \"5a3b7e1e-41ff-4029-b2cd-6dc6be40ae3d\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-5vb4n" Dec 04 17:42:37 crc kubenswrapper[4631]: I1204 17:42:37.088670 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdnwk\" (UniqueName: \"kubernetes.io/projected/5a3b7e1e-41ff-4029-b2cd-6dc6be40ae3d-kube-api-access-wdnwk\") pod \"cert-manager-cainjector-7f985d654d-5vb4n\" (UID: \"5a3b7e1e-41ff-4029-b2cd-6dc6be40ae3d\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-5vb4n" Dec 04 17:42:37 crc kubenswrapper[4631]: I1204 17:42:37.164151 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q474j\" (UniqueName: \"kubernetes.io/projected/ea64b959-aecd-46e1-b2a4-cde17cc753d8-kube-api-access-q474j\") pod \"cert-manager-5b446d88c5-6djk2\" (UID: \"ea64b959-aecd-46e1-b2a4-cde17cc753d8\") " pod="cert-manager/cert-manager-5b446d88c5-6djk2" Dec 04 17:42:37 crc kubenswrapper[4631]: I1204 17:42:37.164643 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkbhl\" (UniqueName: 
\"kubernetes.io/projected/d558fa33-5875-4eb1-80ec-2f5726659b7e-kube-api-access-dkbhl\") pod \"cert-manager-webhook-5655c58dd6-tffkj\" (UID: \"d558fa33-5875-4eb1-80ec-2f5726659b7e\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-tffkj" Dec 04 17:42:37 crc kubenswrapper[4631]: I1204 17:42:37.180098 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-5vb4n" Dec 04 17:42:37 crc kubenswrapper[4631]: I1204 17:42:37.184829 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q474j\" (UniqueName: \"kubernetes.io/projected/ea64b959-aecd-46e1-b2a4-cde17cc753d8-kube-api-access-q474j\") pod \"cert-manager-5b446d88c5-6djk2\" (UID: \"ea64b959-aecd-46e1-b2a4-cde17cc753d8\") " pod="cert-manager/cert-manager-5b446d88c5-6djk2" Dec 04 17:42:37 crc kubenswrapper[4631]: I1204 17:42:37.189553 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkbhl\" (UniqueName: \"kubernetes.io/projected/d558fa33-5875-4eb1-80ec-2f5726659b7e-kube-api-access-dkbhl\") pod \"cert-manager-webhook-5655c58dd6-tffkj\" (UID: \"d558fa33-5875-4eb1-80ec-2f5726659b7e\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-tffkj" Dec 04 17:42:37 crc kubenswrapper[4631]: I1204 17:42:37.204146 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-6djk2" Dec 04 17:42:37 crc kubenswrapper[4631]: I1204 17:42:37.220223 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-tffkj" Dec 04 17:42:37 crc kubenswrapper[4631]: I1204 17:42:37.550183 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-6djk2"] Dec 04 17:42:37 crc kubenswrapper[4631]: I1204 17:42:37.561045 4631 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 17:42:37 crc kubenswrapper[4631]: I1204 17:42:37.631555 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-6djk2" event={"ID":"ea64b959-aecd-46e1-b2a4-cde17cc753d8","Type":"ContainerStarted","Data":"282347667eb31d62b4631242d058fdff19da962897070f3b91cc28e42fd31bc8"} Dec 04 17:42:37 crc kubenswrapper[4631]: I1204 17:42:37.656764 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-5vb4n"] Dec 04 17:42:37 crc kubenswrapper[4631]: W1204 17:42:37.664158 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a3b7e1e_41ff_4029_b2cd_6dc6be40ae3d.slice/crio-d41102b65e7bf2dad79b41830c1ae86659990e292ff97b9048ebbddb4471d348 WatchSource:0}: Error finding container d41102b65e7bf2dad79b41830c1ae86659990e292ff97b9048ebbddb4471d348: Status 404 returned error can't find the container with id d41102b65e7bf2dad79b41830c1ae86659990e292ff97b9048ebbddb4471d348 Dec 04 17:42:37 crc kubenswrapper[4631]: W1204 17:42:37.711072 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd558fa33_5875_4eb1_80ec_2f5726659b7e.slice/crio-db698de904786c160e3dca1df68c293aaf57112518e21dadbfb4167f166bdd67 WatchSource:0}: Error finding container db698de904786c160e3dca1df68c293aaf57112518e21dadbfb4167f166bdd67: Status 404 returned error can't find the container with id db698de904786c160e3dca1df68c293aaf57112518e21dadbfb4167f166bdd67 
Dec 04 17:42:37 crc kubenswrapper[4631]: I1204 17:42:37.713679 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-tffkj"]
Dec 04 17:42:38 crc kubenswrapper[4631]: I1204 17:42:38.638084 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-tffkj" event={"ID":"d558fa33-5875-4eb1-80ec-2f5726659b7e","Type":"ContainerStarted","Data":"db698de904786c160e3dca1df68c293aaf57112518e21dadbfb4167f166bdd67"}
Dec 04 17:42:38 crc kubenswrapper[4631]: I1204 17:42:38.638926 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-5vb4n" event={"ID":"5a3b7e1e-41ff-4029-b2cd-6dc6be40ae3d","Type":"ContainerStarted","Data":"d41102b65e7bf2dad79b41830c1ae86659990e292ff97b9048ebbddb4471d348"}
Dec 04 17:42:41 crc kubenswrapper[4631]: I1204 17:42:41.666988 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-6djk2" event={"ID":"ea64b959-aecd-46e1-b2a4-cde17cc753d8","Type":"ContainerStarted","Data":"02bf927864fd5d5831a4b419c9b4220e2604dc86bea6a609b5e01b1332da5507"}
Dec 04 17:42:41 crc kubenswrapper[4631]: I1204 17:42:41.669784 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-5vb4n" event={"ID":"5a3b7e1e-41ff-4029-b2cd-6dc6be40ae3d","Type":"ContainerStarted","Data":"bfc3f4224d604868b024f4d4e1de006c25c0e05c836f5adb4b1a3a359164bfa0"}
Dec 04 17:42:41 crc kubenswrapper[4631]: I1204 17:42:41.672068 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-tffkj" event={"ID":"d558fa33-5875-4eb1-80ec-2f5726659b7e","Type":"ContainerStarted","Data":"ea9f0f5b32067cd9ab4d92d9a5270e14aa412bc878a18909ad085ad534c1d067"}
Dec 04 17:42:41 crc kubenswrapper[4631]: I1204 17:42:41.672211 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-tffkj"
Dec 04 17:42:41 crc kubenswrapper[4631]: I1204 17:42:41.687077 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-6djk2" podStartSLOduration=2.759580009 podStartE2EDuration="5.687056613s" podCreationTimestamp="2025-12-04 17:42:36 +0000 UTC" firstStartedPulling="2025-12-04 17:42:37.560829052 +0000 UTC m=+887.593071050" lastFinishedPulling="2025-12-04 17:42:40.488305656 +0000 UTC m=+890.520547654" observedRunningTime="2025-12-04 17:42:41.681815613 +0000 UTC m=+891.714057611" watchObservedRunningTime="2025-12-04 17:42:41.687056613 +0000 UTC m=+891.719298611"
Dec 04 17:42:41 crc kubenswrapper[4631]: I1204 17:42:41.703980 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-5vb4n" podStartSLOduration=2.892670596 podStartE2EDuration="5.703959835s" podCreationTimestamp="2025-12-04 17:42:36 +0000 UTC" firstStartedPulling="2025-12-04 17:42:37.665980232 +0000 UTC m=+887.698222230" lastFinishedPulling="2025-12-04 17:42:40.477269471 +0000 UTC m=+890.509511469" observedRunningTime="2025-12-04 17:42:41.702566745 +0000 UTC m=+891.734808753" watchObservedRunningTime="2025-12-04 17:42:41.703959835 +0000 UTC m=+891.736201833"
Dec 04 17:42:41 crc kubenswrapper[4631]: I1204 17:42:41.719652 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-tffkj" podStartSLOduration=1.951635993 podStartE2EDuration="5.719628932s" podCreationTimestamp="2025-12-04 17:42:36 +0000 UTC" firstStartedPulling="2025-12-04 17:42:37.715995699 +0000 UTC m=+887.748237697" lastFinishedPulling="2025-12-04 17:42:41.483988648 +0000 UTC m=+891.516230636" observedRunningTime="2025-12-04 17:42:41.715752102 +0000 UTC m=+891.747994100" watchObservedRunningTime="2025-12-04 17:42:41.719628932 +0000 UTC m=+891.751870930"
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.225777 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-tffkj"
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.428094 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-vpgzg"]
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.428828 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovn-controller" containerID="cri-o://22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75" gracePeriod=30
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.428917 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="northd" containerID="cri-o://48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a" gracePeriod=30
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.428977 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovn-acl-logging" containerID="cri-o://ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23" gracePeriod=30
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.429062 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="nbdb" containerID="cri-o://05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442" gracePeriod=30
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.429064 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053" gracePeriod=30
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.428989 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="kube-rbac-proxy-node" containerID="cri-o://eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326" gracePeriod=30
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.429088 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="sbdb" containerID="cri-o://c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3" gracePeriod=30
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.502068 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovnkube-controller" containerID="cri-o://7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0" gracePeriod=30
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.707290 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vpgzg_0d617abc-dc04-4807-b684-3640cde38e81/ovnkube-controller/3.log"
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.708698 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vpgzg_0d617abc-dc04-4807-b684-3640cde38e81/ovn-acl-logging/0.log"
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.709075 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vpgzg_0d617abc-dc04-4807-b684-3640cde38e81/ovn-controller/0.log"
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.709383 4631 generic.go:334] "Generic (PLEG): container finished" podID="0d617abc-dc04-4807-b684-3640cde38e81" containerID="7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0" exitCode=0
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.709407 4631 generic.go:334] "Generic (PLEG): container finished" podID="0d617abc-dc04-4807-b684-3640cde38e81" containerID="9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053" exitCode=0
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.709416 4631 generic.go:334] "Generic (PLEG): container finished" podID="0d617abc-dc04-4807-b684-3640cde38e81" containerID="eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326" exitCode=0
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.709423 4631 generic.go:334] "Generic (PLEG): container finished" podID="0d617abc-dc04-4807-b684-3640cde38e81" containerID="ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23" exitCode=143
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.709432 4631 generic.go:334] "Generic (PLEG): container finished" podID="0d617abc-dc04-4807-b684-3640cde38e81" containerID="22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75" exitCode=143
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.709473 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerDied","Data":"7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0"}
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.709513 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerDied","Data":"9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053"}
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.709523 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerDied","Data":"eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326"}
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.709531 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerDied","Data":"ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23"}
Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.709540 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg"
event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerDied","Data":"22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75"} Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.709574 4631 scope.go:117] "RemoveContainer" containerID="04cf802bdf1fd0d81a685ca8171bfcea89086fdd4979fcc18df7c134d482e052" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.711936 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zllp2_03e821a0-13d4-417c-9e54-7073b08490db/kube-multus/2.log" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.712407 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zllp2_03e821a0-13d4-417c-9e54-7073b08490db/kube-multus/1.log" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.712427 4631 generic.go:334] "Generic (PLEG): container finished" podID="03e821a0-13d4-417c-9e54-7073b08490db" containerID="0318e7532312b85217ec8efa77e3954b4f97cddd0c6c323b138b158f7fe38080" exitCode=2 Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.712444 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zllp2" event={"ID":"03e821a0-13d4-417c-9e54-7073b08490db","Type":"ContainerDied","Data":"0318e7532312b85217ec8efa77e3954b4f97cddd0c6c323b138b158f7fe38080"} Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.712976 4631 scope.go:117] "RemoveContainer" containerID="0318e7532312b85217ec8efa77e3954b4f97cddd0c6c323b138b158f7fe38080" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.797990 4631 scope.go:117] "RemoveContainer" containerID="efdd152e4738f125d721a6d044c0d96a378761e28d18f292c85706d0f3158f4f" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.847746 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vpgzg_0d617abc-dc04-4807-b684-3640cde38e81/ovn-acl-logging/0.log" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.848344 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vpgzg_0d617abc-dc04-4807-b684-3640cde38e81/ovn-controller/0.log" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.848879 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.907603 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-t5hnw"] Dec 04 17:42:47 crc kubenswrapper[4631]: E1204 17:42:47.907803 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="kube-rbac-proxy-ovn-metrics" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.907820 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="kube-rbac-proxy-ovn-metrics" Dec 04 17:42:47 crc kubenswrapper[4631]: E1204 17:42:47.907828 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovn-acl-logging" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.907836 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovn-acl-logging" Dec 04 17:42:47 crc kubenswrapper[4631]: E1204 17:42:47.907847 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="kube-rbac-proxy-node" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.907854 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="kube-rbac-proxy-node" Dec 04 17:42:47 crc kubenswrapper[4631]: E1204 17:42:47.907860 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovn-controller" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.907866 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovn-controller" Dec 04 17:42:47 crc kubenswrapper[4631]: E1204 17:42:47.907874 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovnkube-controller" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.907880 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovnkube-controller" Dec 04 17:42:47 crc kubenswrapper[4631]: E1204 17:42:47.907887 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovnkube-controller" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.907893 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovnkube-controller" Dec 04 17:42:47 crc kubenswrapper[4631]: E1204 17:42:47.907919 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovnkube-controller" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.907927 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovnkube-controller" Dec 04 17:42:47 crc kubenswrapper[4631]: E1204 17:42:47.907939 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="kubecfg-setup" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.907951 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="kubecfg-setup" Dec 04 17:42:47 crc kubenswrapper[4631]: E1204 17:42:47.907960 4631 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="sbdb" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.907965 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="sbdb" Dec 04 17:42:47 crc kubenswrapper[4631]: E1204 17:42:47.907973 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="nbdb" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.907980 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="nbdb" Dec 04 17:42:47 crc kubenswrapper[4631]: E1204 17:42:47.907989 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="northd" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.907996 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="northd" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.908082 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovnkube-controller" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.908092 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovnkube-controller" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.908100 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovn-acl-logging" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.908108 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovnkube-controller" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.908115 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="nbdb" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.908125 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovn-controller" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.908132 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="kube-rbac-proxy-ovn-metrics" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.908141 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="sbdb" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.908152 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="northd" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.908160 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="kube-rbac-proxy-node" Dec 04 17:42:47 crc kubenswrapper[4631]: E1204 17:42:47.908258 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovnkube-controller" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.908266 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovnkube-controller" Dec 04 17:42:47 crc kubenswrapper[4631]: E1204 17:42:47.908274 4631 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovnkube-controller" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.908281 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovnkube-controller" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.908380 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovnkube-controller" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.908548 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d617abc-dc04-4807-b684-3640cde38e81" containerName="ovnkube-controller" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.909849 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.916882 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-run-ovn\") pod \"0d617abc-dc04-4807-b684-3640cde38e81\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.916921 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-node-log\") pod \"0d617abc-dc04-4807-b684-3640cde38e81\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.916940 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-run-systemd\") pod \"0d617abc-dc04-4807-b684-3640cde38e81\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.916960 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-run-netns\") pod \"0d617abc-dc04-4807-b684-3640cde38e81\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.916981 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-systemd-units\") pod \"0d617abc-dc04-4807-b684-3640cde38e81\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.916980 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "0d617abc-dc04-4807-b684-3640cde38e81" (UID: "0d617abc-dc04-4807-b684-3640cde38e81"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917006 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-run-openvswitch\") pod \"0d617abc-dc04-4807-b684-3640cde38e81\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917029 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0d617abc-dc04-4807-b684-3640cde38e81-env-overrides\") pod \"0d617abc-dc04-4807-b684-3640cde38e81\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917049 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0d617abc-dc04-4807-b684-3640cde38e81-ovnkube-config\") pod \"0d617abc-dc04-4807-b684-3640cde38e81\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917064 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-cni-bin\") pod \"0d617abc-dc04-4807-b684-3640cde38e81\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917084 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-etc-openvswitch\") pod \"0d617abc-dc04-4807-b684-3640cde38e81\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917102 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-var-lib-openvswitch\") pod \"0d617abc-dc04-4807-b684-3640cde38e81\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917117 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-kubelet\") pod \"0d617abc-dc04-4807-b684-3640cde38e81\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917135 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-var-lib-cni-networks-ovn-kubernetes\") pod \"0d617abc-dc04-4807-b684-3640cde38e81\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917152 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-run-ovn-kubernetes\") pod \"0d617abc-dc04-4807-b684-3640cde38e81\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917172 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: 
\"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-cni-netd\") pod \"0d617abc-dc04-4807-b684-3640cde38e81\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917188 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0d617abc-dc04-4807-b684-3640cde38e81-ovn-node-metrics-cert\") pod \"0d617abc-dc04-4807-b684-3640cde38e81\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917252 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78nbb\" (UniqueName: \"kubernetes.io/projected/1d623ba1-82b7-4e29-8b85-00643d387aad-kube-api-access-78nbb\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917271 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-run-openvswitch\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917284 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1d623ba1-82b7-4e29-8b85-00643d387aad-ovnkube-config\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917299 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-node-log\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917325 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-log-socket\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917353 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917389 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-host-cni-netd\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917404 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-host-cni-bin\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917424 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-run-ovn\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917446 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-host-run-ovn-kubernetes\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917463 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-var-lib-openvswitch\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917482 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-systemd-units\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917499 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-etc-openvswitch\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917517 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-host-run-netns\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917533 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-host-slash\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917549 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1d623ba1-82b7-4e29-8b85-00643d387aad-ovn-node-metrics-cert\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 
17:42:47.917568 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1d623ba1-82b7-4e29-8b85-00643d387aad-env-overrides\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917593 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1d623ba1-82b7-4e29-8b85-00643d387aad-ovnkube-script-lib\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917610 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-run-systemd\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917627 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-host-kubelet\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917668 4631 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917730 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "0d617abc-dc04-4807-b684-3640cde38e81" (UID: "0d617abc-dc04-4807-b684-3640cde38e81"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917756 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "0d617abc-dc04-4807-b684-3640cde38e81" (UID: "0d617abc-dc04-4807-b684-3640cde38e81"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.917777 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "0d617abc-dc04-4807-b684-3640cde38e81" (UID: "0d617abc-dc04-4807-b684-3640cde38e81"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.918191 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d617abc-dc04-4807-b684-3640cde38e81-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "0d617abc-dc04-4807-b684-3640cde38e81" (UID: "0d617abc-dc04-4807-b684-3640cde38e81"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.918722 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d617abc-dc04-4807-b684-3640cde38e81-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "0d617abc-dc04-4807-b684-3640cde38e81" (UID: "0d617abc-dc04-4807-b684-3640cde38e81"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.918763 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "0d617abc-dc04-4807-b684-3640cde38e81" (UID: "0d617abc-dc04-4807-b684-3640cde38e81"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.918795 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "0d617abc-dc04-4807-b684-3640cde38e81" (UID: "0d617abc-dc04-4807-b684-3640cde38e81"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.918861 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "0d617abc-dc04-4807-b684-3640cde38e81" (UID: "0d617abc-dc04-4807-b684-3640cde38e81"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.918862 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "0d617abc-dc04-4807-b684-3640cde38e81" (UID: "0d617abc-dc04-4807-b684-3640cde38e81"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.918868 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-node-log" (OuterVolumeSpecName: "node-log") pod "0d617abc-dc04-4807-b684-3640cde38e81" (UID: "0d617abc-dc04-4807-b684-3640cde38e81"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.918879 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "0d617abc-dc04-4807-b684-3640cde38e81" (UID: "0d617abc-dc04-4807-b684-3640cde38e81"). InnerVolumeSpecName "host-kubelet". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.918911 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "0d617abc-dc04-4807-b684-3640cde38e81" (UID: "0d617abc-dc04-4807-b684-3640cde38e81"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.918959 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "0d617abc-dc04-4807-b684-3640cde38e81" (UID: "0d617abc-dc04-4807-b684-3640cde38e81"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.925814 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d617abc-dc04-4807-b684-3640cde38e81-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "0d617abc-dc04-4807-b684-3640cde38e81" (UID: "0d617abc-dc04-4807-b684-3640cde38e81"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:42:47 crc kubenswrapper[4631]: I1204 17:42:47.933709 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "0d617abc-dc04-4807-b684-3640cde38e81" (UID: "0d617abc-dc04-4807-b684-3640cde38e81"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018171 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtfmf\" (UniqueName: \"kubernetes.io/projected/0d617abc-dc04-4807-b684-3640cde38e81-kube-api-access-vtfmf\") pod \"0d617abc-dc04-4807-b684-3640cde38e81\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018216 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0d617abc-dc04-4807-b684-3640cde38e81-ovnkube-script-lib\") pod \"0d617abc-dc04-4807-b684-3640cde38e81\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018240 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-log-socket\") pod \"0d617abc-dc04-4807-b684-3640cde38e81\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018256 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-slash\") pod \"0d617abc-dc04-4807-b684-3640cde38e81\" (UID: \"0d617abc-dc04-4807-b684-3640cde38e81\") " Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018332 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-log-socket\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018354 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018388 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-host-cni-netd\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018406 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-host-cni-bin\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018424 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-run-ovn\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018444 4631 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-host-run-ovn-kubernetes\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018462 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-var-lib-openvswitch\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018479 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-systemd-units\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018495 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-etc-openvswitch\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018510 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-host-run-netns\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018524 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-host-slash\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018538 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1d623ba1-82b7-4e29-8b85-00643d387aad-ovn-node-metrics-cert\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018554 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1d623ba1-82b7-4e29-8b85-00643d387aad-env-overrides\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018575 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1d623ba1-82b7-4e29-8b85-00643d387aad-ovnkube-script-lib\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018590 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-run-systemd\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018607 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-host-kubelet\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018626 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78nbb\" (UniqueName: \"kubernetes.io/projected/1d623ba1-82b7-4e29-8b85-00643d387aad-kube-api-access-78nbb\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018641 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-run-openvswitch\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018656 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1d623ba1-82b7-4e29-8b85-00643d387aad-ovnkube-config\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018676 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-node-log\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018718 4631 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-node-log\") on node \"crc\" DevicePath \"\"" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018729 4631 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018737 4631 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018745 4631 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018756 4631 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018750 4631 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d617abc-dc04-4807-b684-3640cde38e81-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "0d617abc-dc04-4807-b684-3640cde38e81" (UID: "0d617abc-dc04-4807-b684-3640cde38e81"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018765 4631 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0d617abc-dc04-4807-b684-3640cde38e81-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018821 4631 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018833 4631 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0d617abc-dc04-4807-b684-3640cde38e81-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018834 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-log-socket" (OuterVolumeSpecName: "log-socket") pod "0d617abc-dc04-4807-b684-3640cde38e81" (UID: "0d617abc-dc04-4807-b684-3640cde38e81"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018844 4631 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018855 4631 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018861 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-slash" (OuterVolumeSpecName: "host-slash") pod "0d617abc-dc04-4807-b684-3640cde38e81" (UID: "0d617abc-dc04-4807-b684-3640cde38e81"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018866 4631 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018877 4631 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018882 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-log-socket\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018890 4631 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018904 4631 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018909 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018916 4631 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0d617abc-dc04-4807-b684-3640cde38e81-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018794 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-node-log\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018930 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-host-cni-netd\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.018949 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-host-cni-bin\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.019142 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: 
\"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-systemd-units\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.019186 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-run-ovn\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.019211 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-host-run-ovn-kubernetes\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.019236 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-var-lib-openvswitch\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.019259 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-host-run-netns\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.019280 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-etc-openvswitch\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.019303 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-host-kubelet\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.019866 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1d623ba1-82b7-4e29-8b85-00643d387aad-ovnkube-script-lib\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.019902 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-run-systemd\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.019924 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-host-slash\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.019946 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1d623ba1-82b7-4e29-8b85-00643d387aad-run-openvswitch\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.020049 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1d623ba1-82b7-4e29-8b85-00643d387aad-env-overrides\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.020614 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1d623ba1-82b7-4e29-8b85-00643d387aad-ovnkube-config\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.023152 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d617abc-dc04-4807-b684-3640cde38e81-kube-api-access-vtfmf" (OuterVolumeSpecName: "kube-api-access-vtfmf") pod "0d617abc-dc04-4807-b684-3640cde38e81" (UID: "0d617abc-dc04-4807-b684-3640cde38e81"). InnerVolumeSpecName "kube-api-access-vtfmf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.026049 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1d623ba1-82b7-4e29-8b85-00643d387aad-ovn-node-metrics-cert\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.035075 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78nbb\" (UniqueName: \"kubernetes.io/projected/1d623ba1-82b7-4e29-8b85-00643d387aad-kube-api-access-78nbb\") pod \"ovnkube-node-t5hnw\" (UID: \"1d623ba1-82b7-4e29-8b85-00643d387aad\") " pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.119642 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtfmf\" (UniqueName: \"kubernetes.io/projected/0d617abc-dc04-4807-b684-3640cde38e81-kube-api-access-vtfmf\") on node \"crc\" DevicePath \"\"" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.119685 4631 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0d617abc-dc04-4807-b684-3640cde38e81-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.119700 4631 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-log-socket\") on node \"crc\" DevicePath \"\"" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.119711 4631 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0d617abc-dc04-4807-b684-3640cde38e81-host-slash\") on node \"crc\" DevicePath \"\"" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 
17:42:48.225799 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.724753 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vpgzg_0d617abc-dc04-4807-b684-3640cde38e81/ovn-acl-logging/0.log" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.725647 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vpgzg_0d617abc-dc04-4807-b684-3640cde38e81/ovn-controller/0.log" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.726102 4631 generic.go:334] "Generic (PLEG): container finished" podID="0d617abc-dc04-4807-b684-3640cde38e81" containerID="c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3" exitCode=0 Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.726126 4631 generic.go:334] "Generic (PLEG): container finished" podID="0d617abc-dc04-4807-b684-3640cde38e81" containerID="05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442" exitCode=0 Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.726140 4631 generic.go:334] "Generic (PLEG): container finished" podID="0d617abc-dc04-4807-b684-3640cde38e81" containerID="48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a" exitCode=0 Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.726218 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerDied","Data":"c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3"} Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.726251 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerDied","Data":"05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442"} Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.726252 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.726281 4631 scope.go:117] "RemoveContainer" containerID="7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.726268 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerDied","Data":"48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a"} Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.726411 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vpgzg" event={"ID":"0d617abc-dc04-4807-b684-3640cde38e81","Type":"ContainerDied","Data":"b107e2d89e7d0e33e6769dccef66a592afa22d2dad94b499d72acbfa8a438a4f"} Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.729098 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zllp2_03e821a0-13d4-417c-9e54-7073b08490db/kube-multus/2.log" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.729177 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zllp2" event={"ID":"03e821a0-13d4-417c-9e54-7073b08490db","Type":"ContainerStarted","Data":"a6ae4696137aebef9d85bc6ecd62dc3fef4cc68727be55f5c18d9ce8d318e63b"} Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.731578 4631 generic.go:334] "Generic (PLEG): container finished" podID="1d623ba1-82b7-4e29-8b85-00643d387aad" containerID="6b8022c1580eea7399fe1815582a909806958e0d04c4f67e34bf03b3410d759f" exitCode=0 Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.731614 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" event={"ID":"1d623ba1-82b7-4e29-8b85-00643d387aad","Type":"ContainerDied","Data":"6b8022c1580eea7399fe1815582a909806958e0d04c4f67e34bf03b3410d759f"} Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.731635 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" event={"ID":"1d623ba1-82b7-4e29-8b85-00643d387aad","Type":"ContainerStarted","Data":"52f9baf5ea751f73d02366f22fef08c7bd6dfa77121fd70587309ba342abbf5e"} Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.758555 4631 scope.go:117] "RemoveContainer" containerID="c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.821191 4631 scope.go:117] "RemoveContainer" containerID="05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.831586 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-vpgzg"] Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.843976 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-vpgzg"] Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.845109 4631 scope.go:117] "RemoveContainer" containerID="48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.869169 4631 scope.go:117] "RemoveContainer" containerID="9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.884642 4631 scope.go:117] "RemoveContainer" containerID="eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326" Dec 04 17:42:48 crc 
kubenswrapper[4631]: I1204 17:42:48.896532 4631 scope.go:117] "RemoveContainer" containerID="ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.918463 4631 scope.go:117] "RemoveContainer" containerID="22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.952646 4631 scope.go:117] "RemoveContainer" containerID="00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.986825 4631 scope.go:117] "RemoveContainer" containerID="7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0" Dec 04 17:42:48 crc kubenswrapper[4631]: E1204 17:42:48.993336 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0\": container with ID starting with 7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0 not found: ID does not exist" containerID="7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.993411 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0"} err="failed to get container status \"7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0\": rpc error: code = NotFound desc = could not find container \"7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0\": container with ID starting with 7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0 not found: ID does not exist" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.993441 4631 scope.go:117] "RemoveContainer" containerID="c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3" Dec 04 17:42:48 crc kubenswrapper[4631]: E1204 17:42:48.993817 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\": container with ID starting with c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3 not found: ID does not exist" containerID="c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.993848 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3"} err="failed to get container status \"c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\": rpc error: code = NotFound desc = could not find container \"c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\": container with ID starting with c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3 not found: ID does not exist" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.993866 4631 scope.go:117] "RemoveContainer" containerID="05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442" Dec 04 17:42:48 crc kubenswrapper[4631]: E1204 17:42:48.994207 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\": container with ID starting with 05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442 not found: ID does not 
exist" containerID="05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.994232 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442"} err="failed to get container status \"05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\": rpc error: code = NotFound desc = could not find container \"05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\": container with ID starting with 05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442 not found: ID does not exist" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.994248 4631 scope.go:117] "RemoveContainer" containerID="48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a" Dec 04 17:42:48 crc kubenswrapper[4631]: E1204 17:42:48.994550 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\": container with ID starting with 48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a not found: ID does not exist" containerID="48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.994579 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a"} err="failed to get container status \"48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\": rpc error: code = NotFound desc = could not find container \"48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\": container with ID starting with 48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a not found: ID does not exist" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.994597 4631 scope.go:117] "RemoveContainer" containerID="9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053" Dec 04 17:42:48 crc kubenswrapper[4631]: E1204 17:42:48.994849 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\": container with ID starting with 9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053 not found: ID does not exist" containerID="9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.994887 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053"} err="failed to get container status \"9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\": rpc error: code = NotFound desc = could not find container \"9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\": container with ID starting with 9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053 not found: ID does not exist" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.994904 4631 scope.go:117] "RemoveContainer" containerID="eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326" Dec 04 17:42:48 crc kubenswrapper[4631]: E1204 17:42:48.995174 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\": container with ID starting with eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326 not found: ID does not exist" containerID="eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.995204 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326"} err="failed to get container status \"eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\": rpc error: code = NotFound desc = could not find container \"eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\": container with ID starting with eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326 not found: ID does not exist" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.995221 4631 scope.go:117] "RemoveContainer" containerID="ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23" Dec 04 17:42:48 crc kubenswrapper[4631]: E1204 17:42:48.995571 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\": container with ID starting with ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23 not found: ID does not exist" containerID="ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.995601 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23"} err="failed to get container status \"ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\": rpc error: code = NotFound desc = could not find container \"ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\": container with ID starting with ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23 not found: ID does not exist" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.995621 4631 scope.go:117] "RemoveContainer" containerID="22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75" Dec 04 17:42:48 crc kubenswrapper[4631]: E1204 17:42:48.995916 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\": container with ID starting with 22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75 not found: ID does not exist" containerID="22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.995945 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75"} err="failed to get container status \"22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\": rpc error: code = NotFound desc = could not find container \"22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\": container with ID starting with 22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75 not found: ID does not exist" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.995962 4631 scope.go:117] "RemoveContainer" containerID="00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2" Dec 04 17:42:48 crc 
kubenswrapper[4631]: E1204 17:42:48.996234 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\": container with ID starting with 00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2 not found: ID does not exist" containerID="00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.996270 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2"} err="failed to get container status \"00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\": rpc error: code = NotFound desc = could not find container \"00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\": container with ID starting with 00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2 not found: ID does not exist" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.996287 4631 scope.go:117] "RemoveContainer" containerID="7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.996571 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0"} err="failed to get container status \"7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0\": rpc error: code = NotFound desc = could not find container \"7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0\": container with ID starting with 7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0 not found: ID does not exist" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.996596 4631 scope.go:117] "RemoveContainer" containerID="c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.996872 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3"} err="failed to get container status \"c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\": rpc error: code = NotFound desc = could not find container \"c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\": container with ID starting with c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3 not found: ID does not exist" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.996895 4631 scope.go:117] "RemoveContainer" containerID="05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.997141 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442"} err="failed to get container status \"05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\": rpc error: code = NotFound desc = could not find container \"05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\": container with ID starting with 05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442 not found: ID does not exist" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.997162 4631 scope.go:117] "RemoveContainer" containerID="48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a" Dec 04 17:42:48 crc 
kubenswrapper[4631]: I1204 17:42:48.997470 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a"} err="failed to get container status \"48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\": rpc error: code = NotFound desc = could not find container \"48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\": container with ID starting with 48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a not found: ID does not exist" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.997492 4631 scope.go:117] "RemoveContainer" containerID="9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.997761 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053"} err="failed to get container status \"9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\": rpc error: code = NotFound desc = could not find container \"9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\": container with ID starting with 9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053 not found: ID does not exist" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.997786 4631 scope.go:117] "RemoveContainer" containerID="eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.998063 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326"} err="failed to get container status \"eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\": rpc error: code = NotFound desc = could not find container \"eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\": container with ID starting with eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326 not found: ID does not exist" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.998084 4631 scope.go:117] "RemoveContainer" containerID="ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.998357 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23"} err="failed to get container status \"ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\": rpc error: code = NotFound desc = could not find container \"ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\": container with ID starting with ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23 not found: ID does not exist" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.998432 4631 scope.go:117] "RemoveContainer" containerID="22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.998700 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75"} err="failed to get container status \"22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\": rpc error: code = NotFound desc = could not find container \"22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\": container with ID 
starting with 22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75 not found: ID does not exist" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.998720 4631 scope.go:117] "RemoveContainer" containerID="00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.998968 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2"} err="failed to get container status \"00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\": rpc error: code = NotFound desc = could not find container \"00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\": container with ID starting with 00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2 not found: ID does not exist" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.998991 4631 scope.go:117] "RemoveContainer" containerID="7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.999207 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0"} err="failed to get container status \"7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0\": rpc error: code = NotFound desc = could not find container \"7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0\": container with ID starting with 7c71d085820974a1cbda612e7d54d2ab7ed9bcbe7f4464fc63e95ea654cd69a0 not found: ID does not exist" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.999228 4631 scope.go:117] "RemoveContainer" containerID="c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.999501 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3"} err="failed to get container status \"c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\": rpc error: code = NotFound desc = could not find container \"c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3\": container with ID starting with c828ebc59172a619900b09861f5d8d424747ef43de0d8c91e7e4e02414c641c3 not found: ID does not exist" Dec 04 17:42:48 crc kubenswrapper[4631]: I1204 17:42:48.999523 4631 scope.go:117] "RemoveContainer" containerID="05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442" Dec 04 17:42:49 crc kubenswrapper[4631]: I1204 17:42:49.000042 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442"} err="failed to get container status \"05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\": rpc error: code = NotFound desc = could not find container \"05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442\": container with ID starting with 05b2f3633d723be51d65e13f418f6ed3f0c859f12c1524f17c22c1aaae3f8442 not found: ID does not exist" Dec 04 17:42:49 crc kubenswrapper[4631]: I1204 17:42:49.000066 4631 scope.go:117] "RemoveContainer" containerID="48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a" Dec 04 17:42:49 crc kubenswrapper[4631]: I1204 17:42:49.000570 4631 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a"} err="failed to get container status \"48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\": rpc error: code = NotFound desc = could not find container \"48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a\": container with ID starting with 48d7e06efa0b645f2b45354c91abb9519c21744b7641fd036fac91043201c70a not found: ID does not exist" Dec 04 17:42:49 crc kubenswrapper[4631]: I1204 17:42:49.000589 4631 scope.go:117] "RemoveContainer" containerID="9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053" Dec 04 17:42:49 crc kubenswrapper[4631]: I1204 17:42:49.000882 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053"} err="failed to get container status \"9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\": rpc error: code = NotFound desc = could not find container \"9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053\": container with ID starting with 9335cfb8db9b2f834c758b864ceeb6058e2c13f869dfc495847c873232eba053 not found: ID does not exist" Dec 04 17:42:49 crc kubenswrapper[4631]: I1204 17:42:49.000899 4631 scope.go:117] "RemoveContainer" containerID="eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326" Dec 04 17:42:49 crc kubenswrapper[4631]: I1204 17:42:49.001133 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326"} err="failed to get container status \"eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\": rpc error: code = NotFound desc = could not find container \"eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326\": container with ID starting with eb2771f05bdaf85f67d50bc0ca85d2112948679fad396e523274f57b9dc6c326 not found: ID does not exist" Dec 04 17:42:49 crc kubenswrapper[4631]: I1204 17:42:49.001155 4631 scope.go:117] "RemoveContainer" containerID="ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23" Dec 04 17:42:49 crc kubenswrapper[4631]: I1204 17:42:49.001350 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23"} err="failed to get container status \"ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\": rpc error: code = NotFound desc = could not find container \"ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23\": container with ID starting with ccc36c4f375a9a9f899e230e5b95baa6f5f2b6ad530141ce2819ac477e3f3b23 not found: ID does not exist" Dec 04 17:42:49 crc kubenswrapper[4631]: I1204 17:42:49.001399 4631 scope.go:117] "RemoveContainer" containerID="22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75" Dec 04 17:42:49 crc kubenswrapper[4631]: I1204 17:42:49.002050 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75"} err="failed to get container status \"22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\": rpc error: code = NotFound desc = could not find container \"22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75\": container with ID starting with 22dbe70c5863149d919b5c2ca9d94d5970757b3063c56759756a6c76c8c9cc75 not found: ID does not exist" Dec 
04 17:42:49 crc kubenswrapper[4631]: I1204 17:42:49.002082 4631 scope.go:117] "RemoveContainer" containerID="00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2" Dec 04 17:42:49 crc kubenswrapper[4631]: I1204 17:42:49.002545 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2"} err="failed to get container status \"00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\": rpc error: code = NotFound desc = could not find container \"00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2\": container with ID starting with 00e440ff5e8129c4ddef087cff03938d986569a6c81da85c0b5e40e0975317a2 not found: ID does not exist" Dec 04 17:42:49 crc kubenswrapper[4631]: I1204 17:42:49.740668 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" event={"ID":"1d623ba1-82b7-4e29-8b85-00643d387aad","Type":"ContainerStarted","Data":"8c0a5f6e725982219b393e3b29d0ee4edddf9328123c9ed2cf0af662d9b18352"} Dec 04 17:42:49 crc kubenswrapper[4631]: I1204 17:42:49.741432 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" event={"ID":"1d623ba1-82b7-4e29-8b85-00643d387aad","Type":"ContainerStarted","Data":"1e51d348493245297ab071a39f6260c1c134b6c6c3e3b0c63d626435968e58ca"} Dec 04 17:42:49 crc kubenswrapper[4631]: I1204 17:42:49.741449 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" event={"ID":"1d623ba1-82b7-4e29-8b85-00643d387aad","Type":"ContainerStarted","Data":"d36781c9489b46b605cd3df54830447f20cd3e6e13ab9111d60c1b76f558276e"} Dec 04 17:42:49 crc kubenswrapper[4631]: I1204 17:42:49.741459 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" event={"ID":"1d623ba1-82b7-4e29-8b85-00643d387aad","Type":"ContainerStarted","Data":"b32a223e09d9fc94455c9ca2489a614d8c6df1ed2ce1b372abe2fc4d54dc5c0a"} Dec 04 17:42:49 crc kubenswrapper[4631]: I1204 17:42:49.741470 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" event={"ID":"1d623ba1-82b7-4e29-8b85-00643d387aad","Type":"ContainerStarted","Data":"7218255e867bb740bb27689f4f0f07b3d367422985e2f7b4f4efca358c4fcafb"} Dec 04 17:42:49 crc kubenswrapper[4631]: I1204 17:42:49.741482 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" event={"ID":"1d623ba1-82b7-4e29-8b85-00643d387aad","Type":"ContainerStarted","Data":"d319c0a5e829a9c1651a2887f9395c8215f4be72d23a0f37c612fbfda11f0fa7"} Dec 04 17:42:50 crc kubenswrapper[4631]: I1204 17:42:50.253536 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d617abc-dc04-4807-b684-3640cde38e81" path="/var/lib/kubelet/pods/0d617abc-dc04-4807-b684-3640cde38e81/volumes" Dec 04 17:42:52 crc kubenswrapper[4631]: I1204 17:42:52.777267 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" event={"ID":"1d623ba1-82b7-4e29-8b85-00643d387aad","Type":"ContainerStarted","Data":"c417b1719f8196d228607ea25f45a9d6b8e148674773ec0096a7f33f0bedbf55"} Dec 04 17:42:54 crc kubenswrapper[4631]: I1204 17:42:54.795185 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" 
event={"ID":"1d623ba1-82b7-4e29-8b85-00643d387aad","Type":"ContainerStarted","Data":"e96550d812894134b31377bfea4b7b6ed95c9adaa509d2a8bec6253b978166a2"} Dec 04 17:42:55 crc kubenswrapper[4631]: I1204 17:42:55.799667 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:55 crc kubenswrapper[4631]: I1204 17:42:55.831658 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" podStartSLOduration=8.831642516 podStartE2EDuration="8.831642516s" podCreationTimestamp="2025-12-04 17:42:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:42:55.828703772 +0000 UTC m=+905.860945770" watchObservedRunningTime="2025-12-04 17:42:55.831642516 +0000 UTC m=+905.863884504" Dec 04 17:42:55 crc kubenswrapper[4631]: I1204 17:42:55.882358 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:56 crc kubenswrapper[4631]: I1204 17:42:56.806433 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:56 crc kubenswrapper[4631]: I1204 17:42:56.806749 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:42:56 crc kubenswrapper[4631]: I1204 17:42:56.834183 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:43:18 crc kubenswrapper[4631]: I1204 17:43:18.264519 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-t5hnw" Dec 04 17:43:29 crc kubenswrapper[4631]: I1204 17:43:29.468210 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84"] Dec 04 17:43:29 crc kubenswrapper[4631]: I1204 17:43:29.470581 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84" Dec 04 17:43:29 crc kubenswrapper[4631]: I1204 17:43:29.473007 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 04 17:43:29 crc kubenswrapper[4631]: I1204 17:43:29.482564 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84"] Dec 04 17:43:29 crc kubenswrapper[4631]: I1204 17:43:29.615099 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqrgf\" (UniqueName: \"kubernetes.io/projected/12f1788b-0cad-4272-9208-6ed4bd4d2ac0-kube-api-access-qqrgf\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84\" (UID: \"12f1788b-0cad-4272-9208-6ed4bd4d2ac0\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84" Dec 04 17:43:29 crc kubenswrapper[4631]: I1204 17:43:29.615674 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/12f1788b-0cad-4272-9208-6ed4bd4d2ac0-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84\" (UID: \"12f1788b-0cad-4272-9208-6ed4bd4d2ac0\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84" Dec 04 17:43:29 crc kubenswrapper[4631]: I1204 17:43:29.615716 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/12f1788b-0cad-4272-9208-6ed4bd4d2ac0-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84\" (UID: \"12f1788b-0cad-4272-9208-6ed4bd4d2ac0\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84" Dec 04 17:43:29 crc kubenswrapper[4631]: I1204 17:43:29.716345 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/12f1788b-0cad-4272-9208-6ed4bd4d2ac0-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84\" (UID: \"12f1788b-0cad-4272-9208-6ed4bd4d2ac0\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84" Dec 04 17:43:29 crc kubenswrapper[4631]: I1204 17:43:29.716445 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/12f1788b-0cad-4272-9208-6ed4bd4d2ac0-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84\" (UID: \"12f1788b-0cad-4272-9208-6ed4bd4d2ac0\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84" Dec 04 17:43:29 crc kubenswrapper[4631]: I1204 17:43:29.716516 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqrgf\" (UniqueName: \"kubernetes.io/projected/12f1788b-0cad-4272-9208-6ed4bd4d2ac0-kube-api-access-qqrgf\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84\" (UID: \"12f1788b-0cad-4272-9208-6ed4bd4d2ac0\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84" Dec 04 17:43:29 crc kubenswrapper[4631]: I1204 17:43:29.716979 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/12f1788b-0cad-4272-9208-6ed4bd4d2ac0-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84\" (UID: \"12f1788b-0cad-4272-9208-6ed4bd4d2ac0\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84" Dec 04 17:43:29 crc kubenswrapper[4631]: I1204 17:43:29.717060 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/12f1788b-0cad-4272-9208-6ed4bd4d2ac0-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84\" (UID: \"12f1788b-0cad-4272-9208-6ed4bd4d2ac0\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84" Dec 04 17:43:29 crc kubenswrapper[4631]: I1204 17:43:29.737898 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqrgf\" (UniqueName: \"kubernetes.io/projected/12f1788b-0cad-4272-9208-6ed4bd4d2ac0-kube-api-access-qqrgf\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84\" (UID: \"12f1788b-0cad-4272-9208-6ed4bd4d2ac0\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84" Dec 04 17:43:29 crc kubenswrapper[4631]: I1204 17:43:29.794999 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84" Dec 04 17:43:30 crc kubenswrapper[4631]: I1204 17:43:30.045300 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84"] Dec 04 17:43:31 crc kubenswrapper[4631]: I1204 17:43:31.034156 4631 generic.go:334] "Generic (PLEG): container finished" podID="12f1788b-0cad-4272-9208-6ed4bd4d2ac0" containerID="fc9c3c9b6f67cbc209af4586b53e9f24c30a6fa8f21f6015438427b39d3cc350" exitCode=0 Dec 04 17:43:31 crc kubenswrapper[4631]: I1204 17:43:31.034197 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84" event={"ID":"12f1788b-0cad-4272-9208-6ed4bd4d2ac0","Type":"ContainerDied","Data":"fc9c3c9b6f67cbc209af4586b53e9f24c30a6fa8f21f6015438427b39d3cc350"} Dec 04 17:43:31 crc kubenswrapper[4631]: I1204 17:43:31.034222 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84" event={"ID":"12f1788b-0cad-4272-9208-6ed4bd4d2ac0","Type":"ContainerStarted","Data":"c3114138730e8d808f60e922e6398ed57c53bc87da6dca2f2d29e74fb464305e"} Dec 04 17:43:31 crc kubenswrapper[4631]: I1204 17:43:31.811727 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qx752"] Dec 04 17:43:31 crc kubenswrapper[4631]: I1204 17:43:31.813942 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qx752" Dec 04 17:43:31 crc kubenswrapper[4631]: I1204 17:43:31.823270 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qx752"] Dec 04 17:43:31 crc kubenswrapper[4631]: I1204 17:43:31.944593 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfrdh\" (UniqueName: \"kubernetes.io/projected/af443b11-a32c-48c5-8dfe-88dd94e43608-kube-api-access-nfrdh\") pod \"redhat-operators-qx752\" (UID: \"af443b11-a32c-48c5-8dfe-88dd94e43608\") " pod="openshift-marketplace/redhat-operators-qx752" Dec 04 17:43:31 crc kubenswrapper[4631]: I1204 17:43:31.944667 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af443b11-a32c-48c5-8dfe-88dd94e43608-utilities\") pod \"redhat-operators-qx752\" (UID: \"af443b11-a32c-48c5-8dfe-88dd94e43608\") " pod="openshift-marketplace/redhat-operators-qx752" Dec 04 17:43:31 crc kubenswrapper[4631]: I1204 17:43:31.944687 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af443b11-a32c-48c5-8dfe-88dd94e43608-catalog-content\") pod \"redhat-operators-qx752\" (UID: \"af443b11-a32c-48c5-8dfe-88dd94e43608\") " pod="openshift-marketplace/redhat-operators-qx752" Dec 04 17:43:32 crc kubenswrapper[4631]: I1204 17:43:32.045311 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfrdh\" (UniqueName: \"kubernetes.io/projected/af443b11-a32c-48c5-8dfe-88dd94e43608-kube-api-access-nfrdh\") pod \"redhat-operators-qx752\" (UID: \"af443b11-a32c-48c5-8dfe-88dd94e43608\") " pod="openshift-marketplace/redhat-operators-qx752" Dec 04 17:43:32 crc kubenswrapper[4631]: I1204 17:43:32.045395 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af443b11-a32c-48c5-8dfe-88dd94e43608-utilities\") pod \"redhat-operators-qx752\" (UID: \"af443b11-a32c-48c5-8dfe-88dd94e43608\") " pod="openshift-marketplace/redhat-operators-qx752" Dec 04 17:43:32 crc kubenswrapper[4631]: I1204 17:43:32.045414 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af443b11-a32c-48c5-8dfe-88dd94e43608-catalog-content\") pod \"redhat-operators-qx752\" (UID: \"af443b11-a32c-48c5-8dfe-88dd94e43608\") " pod="openshift-marketplace/redhat-operators-qx752" Dec 04 17:43:32 crc kubenswrapper[4631]: I1204 17:43:32.045864 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af443b11-a32c-48c5-8dfe-88dd94e43608-catalog-content\") pod \"redhat-operators-qx752\" (UID: \"af443b11-a32c-48c5-8dfe-88dd94e43608\") " pod="openshift-marketplace/redhat-operators-qx752" Dec 04 17:43:32 crc kubenswrapper[4631]: I1204 17:43:32.046112 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af443b11-a32c-48c5-8dfe-88dd94e43608-utilities\") pod \"redhat-operators-qx752\" (UID: \"af443b11-a32c-48c5-8dfe-88dd94e43608\") " pod="openshift-marketplace/redhat-operators-qx752" Dec 04 17:43:32 crc kubenswrapper[4631]: I1204 17:43:32.068026 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-nfrdh\" (UniqueName: \"kubernetes.io/projected/af443b11-a32c-48c5-8dfe-88dd94e43608-kube-api-access-nfrdh\") pod \"redhat-operators-qx752\" (UID: \"af443b11-a32c-48c5-8dfe-88dd94e43608\") " pod="openshift-marketplace/redhat-operators-qx752" Dec 04 17:43:32 crc kubenswrapper[4631]: I1204 17:43:32.138879 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qx752" Dec 04 17:43:32 crc kubenswrapper[4631]: I1204 17:43:32.385902 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qx752"] Dec 04 17:43:32 crc kubenswrapper[4631]: W1204 17:43:32.401346 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf443b11_a32c_48c5_8dfe_88dd94e43608.slice/crio-837561c03eb3be594cd0c953b84dd6b8499e7d7628a7571a8c3909154d9bbdb9 WatchSource:0}: Error finding container 837561c03eb3be594cd0c953b84dd6b8499e7d7628a7571a8c3909154d9bbdb9: Status 404 returned error can't find the container with id 837561c03eb3be594cd0c953b84dd6b8499e7d7628a7571a8c3909154d9bbdb9 Dec 04 17:43:33 crc kubenswrapper[4631]: I1204 17:43:33.046028 4631 generic.go:334] "Generic (PLEG): container finished" podID="12f1788b-0cad-4272-9208-6ed4bd4d2ac0" containerID="7289d4da9e04be9e572d4933af7f2dcb941e03d5c36acb25b3404c06037d66ae" exitCode=0 Dec 04 17:43:33 crc kubenswrapper[4631]: I1204 17:43:33.046120 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84" event={"ID":"12f1788b-0cad-4272-9208-6ed4bd4d2ac0","Type":"ContainerDied","Data":"7289d4da9e04be9e572d4933af7f2dcb941e03d5c36acb25b3404c06037d66ae"} Dec 04 17:43:33 crc kubenswrapper[4631]: I1204 17:43:33.047305 4631 generic.go:334] "Generic (PLEG): container finished" podID="af443b11-a32c-48c5-8dfe-88dd94e43608" containerID="fbb0232d3af014f4c970c833747d340b3d6d36b0ab115ac0ec9ed2f0ecb12bb6" exitCode=0 Dec 04 17:43:33 crc kubenswrapper[4631]: I1204 17:43:33.047344 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qx752" event={"ID":"af443b11-a32c-48c5-8dfe-88dd94e43608","Type":"ContainerDied","Data":"fbb0232d3af014f4c970c833747d340b3d6d36b0ab115ac0ec9ed2f0ecb12bb6"} Dec 04 17:43:33 crc kubenswrapper[4631]: I1204 17:43:33.047394 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qx752" event={"ID":"af443b11-a32c-48c5-8dfe-88dd94e43608","Type":"ContainerStarted","Data":"837561c03eb3be594cd0c953b84dd6b8499e7d7628a7571a8c3909154d9bbdb9"} Dec 04 17:43:34 crc kubenswrapper[4631]: I1204 17:43:34.071671 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qx752" event={"ID":"af443b11-a32c-48c5-8dfe-88dd94e43608","Type":"ContainerStarted","Data":"01c74943cd8edaca45491c59ea41efaf3634a53b41980395228663233276ae97"} Dec 04 17:43:34 crc kubenswrapper[4631]: I1204 17:43:34.074785 4631 generic.go:334] "Generic (PLEG): container finished" podID="12f1788b-0cad-4272-9208-6ed4bd4d2ac0" containerID="29691e6aa4c9f11b608af95f126af55aa92d56ce06d264299f4620a5362fbb3e" exitCode=0 Dec 04 17:43:34 crc kubenswrapper[4631]: I1204 17:43:34.074818 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84" 
event={"ID":"12f1788b-0cad-4272-9208-6ed4bd4d2ac0","Type":"ContainerDied","Data":"29691e6aa4c9f11b608af95f126af55aa92d56ce06d264299f4620a5362fbb3e"} Dec 04 17:43:35 crc kubenswrapper[4631]: I1204 17:43:35.089050 4631 generic.go:334] "Generic (PLEG): container finished" podID="af443b11-a32c-48c5-8dfe-88dd94e43608" containerID="01c74943cd8edaca45491c59ea41efaf3634a53b41980395228663233276ae97" exitCode=0 Dec 04 17:43:35 crc kubenswrapper[4631]: I1204 17:43:35.089156 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qx752" event={"ID":"af443b11-a32c-48c5-8dfe-88dd94e43608","Type":"ContainerDied","Data":"01c74943cd8edaca45491c59ea41efaf3634a53b41980395228663233276ae97"} Dec 04 17:43:35 crc kubenswrapper[4631]: I1204 17:43:35.362026 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84" Dec 04 17:43:35 crc kubenswrapper[4631]: I1204 17:43:35.391653 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/12f1788b-0cad-4272-9208-6ed4bd4d2ac0-util\") pod \"12f1788b-0cad-4272-9208-6ed4bd4d2ac0\" (UID: \"12f1788b-0cad-4272-9208-6ed4bd4d2ac0\") " Dec 04 17:43:35 crc kubenswrapper[4631]: I1204 17:43:35.391712 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qqrgf\" (UniqueName: \"kubernetes.io/projected/12f1788b-0cad-4272-9208-6ed4bd4d2ac0-kube-api-access-qqrgf\") pod \"12f1788b-0cad-4272-9208-6ed4bd4d2ac0\" (UID: \"12f1788b-0cad-4272-9208-6ed4bd4d2ac0\") " Dec 04 17:43:35 crc kubenswrapper[4631]: I1204 17:43:35.391776 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/12f1788b-0cad-4272-9208-6ed4bd4d2ac0-bundle\") pod \"12f1788b-0cad-4272-9208-6ed4bd4d2ac0\" (UID: \"12f1788b-0cad-4272-9208-6ed4bd4d2ac0\") " Dec 04 17:43:35 crc kubenswrapper[4631]: I1204 17:43:35.392674 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12f1788b-0cad-4272-9208-6ed4bd4d2ac0-bundle" (OuterVolumeSpecName: "bundle") pod "12f1788b-0cad-4272-9208-6ed4bd4d2ac0" (UID: "12f1788b-0cad-4272-9208-6ed4bd4d2ac0"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:43:35 crc kubenswrapper[4631]: I1204 17:43:35.398439 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12f1788b-0cad-4272-9208-6ed4bd4d2ac0-kube-api-access-qqrgf" (OuterVolumeSpecName: "kube-api-access-qqrgf") pod "12f1788b-0cad-4272-9208-6ed4bd4d2ac0" (UID: "12f1788b-0cad-4272-9208-6ed4bd4d2ac0"). InnerVolumeSpecName "kube-api-access-qqrgf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:43:35 crc kubenswrapper[4631]: I1204 17:43:35.407249 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12f1788b-0cad-4272-9208-6ed4bd4d2ac0-util" (OuterVolumeSpecName: "util") pod "12f1788b-0cad-4272-9208-6ed4bd4d2ac0" (UID: "12f1788b-0cad-4272-9208-6ed4bd4d2ac0"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:43:35 crc kubenswrapper[4631]: I1204 17:43:35.492767 4631 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/12f1788b-0cad-4272-9208-6ed4bd4d2ac0-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:43:35 crc kubenswrapper[4631]: I1204 17:43:35.492813 4631 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/12f1788b-0cad-4272-9208-6ed4bd4d2ac0-util\") on node \"crc\" DevicePath \"\"" Dec 04 17:43:35 crc kubenswrapper[4631]: I1204 17:43:35.492827 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qqrgf\" (UniqueName: \"kubernetes.io/projected/12f1788b-0cad-4272-9208-6ed4bd4d2ac0-kube-api-access-qqrgf\") on node \"crc\" DevicePath \"\"" Dec 04 17:43:36 crc kubenswrapper[4631]: I1204 17:43:36.023306 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 17:43:36 crc kubenswrapper[4631]: I1204 17:43:36.023856 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 17:43:36 crc kubenswrapper[4631]: I1204 17:43:36.098328 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84" event={"ID":"12f1788b-0cad-4272-9208-6ed4bd4d2ac0","Type":"ContainerDied","Data":"c3114138730e8d808f60e922e6398ed57c53bc87da6dca2f2d29e74fb464305e"} Dec 04 17:43:36 crc kubenswrapper[4631]: I1204 17:43:36.098401 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c3114138730e8d808f60e922e6398ed57c53bc87da6dca2f2d29e74fb464305e" Dec 04 17:43:36 crc kubenswrapper[4631]: I1204 17:43:36.098399 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84" Dec 04 17:43:36 crc kubenswrapper[4631]: I1204 17:43:36.102138 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qx752" event={"ID":"af443b11-a32c-48c5-8dfe-88dd94e43608","Type":"ContainerStarted","Data":"6915e3e26be283b84c97e985923d55d7c27955120f08538ed6bd67c414a35544"} Dec 04 17:43:36 crc kubenswrapper[4631]: I1204 17:43:36.134054 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qx752" podStartSLOduration=2.672455423 podStartE2EDuration="5.134028123s" podCreationTimestamp="2025-12-04 17:43:31 +0000 UTC" firstStartedPulling="2025-12-04 17:43:33.049774528 +0000 UTC m=+943.082016526" lastFinishedPulling="2025-12-04 17:43:35.511347218 +0000 UTC m=+945.543589226" observedRunningTime="2025-12-04 17:43:36.129327 +0000 UTC m=+946.161569018" watchObservedRunningTime="2025-12-04 17:43:36.134028123 +0000 UTC m=+946.166270151" Dec 04 17:43:39 crc kubenswrapper[4631]: I1204 17:43:39.853840 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-2b87p"] Dec 04 17:43:39 crc kubenswrapper[4631]: E1204 17:43:39.854509 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12f1788b-0cad-4272-9208-6ed4bd4d2ac0" containerName="pull" Dec 04 17:43:39 crc kubenswrapper[4631]: I1204 17:43:39.854522 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="12f1788b-0cad-4272-9208-6ed4bd4d2ac0" containerName="pull" Dec 04 17:43:39 crc kubenswrapper[4631]: E1204 17:43:39.854541 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12f1788b-0cad-4272-9208-6ed4bd4d2ac0" containerName="extract" Dec 04 17:43:39 crc kubenswrapper[4631]: I1204 17:43:39.854548 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="12f1788b-0cad-4272-9208-6ed4bd4d2ac0" containerName="extract" Dec 04 17:43:39 crc kubenswrapper[4631]: E1204 17:43:39.854558 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12f1788b-0cad-4272-9208-6ed4bd4d2ac0" containerName="util" Dec 04 17:43:39 crc kubenswrapper[4631]: I1204 17:43:39.854564 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="12f1788b-0cad-4272-9208-6ed4bd4d2ac0" containerName="util" Dec 04 17:43:39 crc kubenswrapper[4631]: I1204 17:43:39.854651 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="12f1788b-0cad-4272-9208-6ed4bd4d2ac0" containerName="extract" Dec 04 17:43:39 crc kubenswrapper[4631]: I1204 17:43:39.855019 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-2b87p" Dec 04 17:43:39 crc kubenswrapper[4631]: I1204 17:43:39.861106 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 04 17:43:39 crc kubenswrapper[4631]: I1204 17:43:39.891427 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 04 17:43:39 crc kubenswrapper[4631]: I1204 17:43:39.891726 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-rvzhb" Dec 04 17:43:39 crc kubenswrapper[4631]: I1204 17:43:39.902053 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-2b87p"] Dec 04 17:43:40 crc kubenswrapper[4631]: I1204 17:43:40.044563 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2tgf\" (UniqueName: \"kubernetes.io/projected/f095f34f-aa8e-4f97-a34d-63fbc8722163-kube-api-access-d2tgf\") pod \"nmstate-operator-5b5b58f5c8-2b87p\" (UID: \"f095f34f-aa8e-4f97-a34d-63fbc8722163\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-2b87p" Dec 04 17:43:40 crc kubenswrapper[4631]: I1204 17:43:40.145331 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2tgf\" (UniqueName: \"kubernetes.io/projected/f095f34f-aa8e-4f97-a34d-63fbc8722163-kube-api-access-d2tgf\") pod \"nmstate-operator-5b5b58f5c8-2b87p\" (UID: \"f095f34f-aa8e-4f97-a34d-63fbc8722163\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-2b87p" Dec 04 17:43:40 crc kubenswrapper[4631]: I1204 17:43:40.173543 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2tgf\" (UniqueName: \"kubernetes.io/projected/f095f34f-aa8e-4f97-a34d-63fbc8722163-kube-api-access-d2tgf\") pod \"nmstate-operator-5b5b58f5c8-2b87p\" (UID: \"f095f34f-aa8e-4f97-a34d-63fbc8722163\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-2b87p" Dec 04 17:43:40 crc kubenswrapper[4631]: I1204 17:43:40.195933 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-2b87p" Dec 04 17:43:40 crc kubenswrapper[4631]: I1204 17:43:40.622482 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-2b87p"] Dec 04 17:43:41 crc kubenswrapper[4631]: I1204 17:43:41.129624 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-2b87p" event={"ID":"f095f34f-aa8e-4f97-a34d-63fbc8722163","Type":"ContainerStarted","Data":"4db7360a9980b5810d6fd652c49c9b7e91fce948424a252125d82915b6b9b710"} Dec 04 17:43:42 crc kubenswrapper[4631]: I1204 17:43:42.140008 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qx752" Dec 04 17:43:42 crc kubenswrapper[4631]: I1204 17:43:42.140550 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qx752" Dec 04 17:43:42 crc kubenswrapper[4631]: I1204 17:43:42.187193 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qx752" Dec 04 17:43:43 crc kubenswrapper[4631]: I1204 17:43:43.186242 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qx752" Dec 04 17:43:44 crc kubenswrapper[4631]: I1204 17:43:44.149568 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-2b87p" event={"ID":"f095f34f-aa8e-4f97-a34d-63fbc8722163","Type":"ContainerStarted","Data":"dd8494e8a1390d7d4495fb679c8b70a807bf20fb44871504b00c345997a1a6b7"} Dec 04 17:43:44 crc kubenswrapper[4631]: I1204 17:43:44.168398 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-2b87p" podStartSLOduration=2.457427747 podStartE2EDuration="5.168379968s" podCreationTimestamp="2025-12-04 17:43:39 +0000 UTC" firstStartedPulling="2025-12-04 17:43:40.639438149 +0000 UTC m=+950.671680147" lastFinishedPulling="2025-12-04 17:43:43.35039036 +0000 UTC m=+953.382632368" observedRunningTime="2025-12-04 17:43:44.165707973 +0000 UTC m=+954.197949991" watchObservedRunningTime="2025-12-04 17:43:44.168379968 +0000 UTC m=+954.200621966" Dec 04 17:43:44 crc kubenswrapper[4631]: I1204 17:43:44.603967 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qx752"] Dec 04 17:43:45 crc kubenswrapper[4631]: I1204 17:43:45.155062 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qx752" podUID="af443b11-a32c-48c5-8dfe-88dd94e43608" containerName="registry-server" containerID="cri-o://6915e3e26be283b84c97e985923d55d7c27955120f08538ed6bd67c414a35544" gracePeriod=2 Dec 04 17:43:47 crc kubenswrapper[4631]: I1204 17:43:47.168200 4631 generic.go:334] "Generic (PLEG): container finished" podID="af443b11-a32c-48c5-8dfe-88dd94e43608" containerID="6915e3e26be283b84c97e985923d55d7c27955120f08538ed6bd67c414a35544" exitCode=0 Dec 04 17:43:47 crc kubenswrapper[4631]: I1204 17:43:47.169510 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qx752" event={"ID":"af443b11-a32c-48c5-8dfe-88dd94e43608","Type":"ContainerDied","Data":"6915e3e26be283b84c97e985923d55d7c27955120f08538ed6bd67c414a35544"} Dec 04 17:43:47 crc kubenswrapper[4631]: I1204 17:43:47.350110 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qx752" Dec 04 17:43:47 crc kubenswrapper[4631]: I1204 17:43:47.445517 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af443b11-a32c-48c5-8dfe-88dd94e43608-utilities\") pod \"af443b11-a32c-48c5-8dfe-88dd94e43608\" (UID: \"af443b11-a32c-48c5-8dfe-88dd94e43608\") " Dec 04 17:43:47 crc kubenswrapper[4631]: I1204 17:43:47.445577 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nfrdh\" (UniqueName: \"kubernetes.io/projected/af443b11-a32c-48c5-8dfe-88dd94e43608-kube-api-access-nfrdh\") pod \"af443b11-a32c-48c5-8dfe-88dd94e43608\" (UID: \"af443b11-a32c-48c5-8dfe-88dd94e43608\") " Dec 04 17:43:47 crc kubenswrapper[4631]: I1204 17:43:47.445619 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af443b11-a32c-48c5-8dfe-88dd94e43608-catalog-content\") pod \"af443b11-a32c-48c5-8dfe-88dd94e43608\" (UID: \"af443b11-a32c-48c5-8dfe-88dd94e43608\") " Dec 04 17:43:47 crc kubenswrapper[4631]: I1204 17:43:47.446706 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af443b11-a32c-48c5-8dfe-88dd94e43608-utilities" (OuterVolumeSpecName: "utilities") pod "af443b11-a32c-48c5-8dfe-88dd94e43608" (UID: "af443b11-a32c-48c5-8dfe-88dd94e43608"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:43:47 crc kubenswrapper[4631]: I1204 17:43:47.451566 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af443b11-a32c-48c5-8dfe-88dd94e43608-kube-api-access-nfrdh" (OuterVolumeSpecName: "kube-api-access-nfrdh") pod "af443b11-a32c-48c5-8dfe-88dd94e43608" (UID: "af443b11-a32c-48c5-8dfe-88dd94e43608"). InnerVolumeSpecName "kube-api-access-nfrdh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:43:47 crc kubenswrapper[4631]: I1204 17:43:47.546529 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af443b11-a32c-48c5-8dfe-88dd94e43608-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 17:43:47 crc kubenswrapper[4631]: I1204 17:43:47.546567 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nfrdh\" (UniqueName: \"kubernetes.io/projected/af443b11-a32c-48c5-8dfe-88dd94e43608-kube-api-access-nfrdh\") on node \"crc\" DevicePath \"\"" Dec 04 17:43:47 crc kubenswrapper[4631]: I1204 17:43:47.562812 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af443b11-a32c-48c5-8dfe-88dd94e43608-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "af443b11-a32c-48c5-8dfe-88dd94e43608" (UID: "af443b11-a32c-48c5-8dfe-88dd94e43608"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:43:47 crc kubenswrapper[4631]: I1204 17:43:47.647170 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af443b11-a32c-48c5-8dfe-88dd94e43608-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 17:43:48 crc kubenswrapper[4631]: I1204 17:43:48.177948 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qx752" event={"ID":"af443b11-a32c-48c5-8dfe-88dd94e43608","Type":"ContainerDied","Data":"837561c03eb3be594cd0c953b84dd6b8499e7d7628a7571a8c3909154d9bbdb9"} Dec 04 17:43:48 crc kubenswrapper[4631]: I1204 17:43:48.178446 4631 scope.go:117] "RemoveContainer" containerID="6915e3e26be283b84c97e985923d55d7c27955120f08538ed6bd67c414a35544" Dec 04 17:43:48 crc kubenswrapper[4631]: I1204 17:43:48.178022 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qx752" Dec 04 17:43:48 crc kubenswrapper[4631]: I1204 17:43:48.201160 4631 scope.go:117] "RemoveContainer" containerID="01c74943cd8edaca45491c59ea41efaf3634a53b41980395228663233276ae97" Dec 04 17:43:48 crc kubenswrapper[4631]: I1204 17:43:48.210467 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qx752"] Dec 04 17:43:48 crc kubenswrapper[4631]: I1204 17:43:48.216051 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qx752"] Dec 04 17:43:48 crc kubenswrapper[4631]: I1204 17:43:48.224559 4631 scope.go:117] "RemoveContainer" containerID="fbb0232d3af014f4c970c833747d340b3d6d36b0ab115ac0ec9ed2f0ecb12bb6" Dec 04 17:43:48 crc kubenswrapper[4631]: I1204 17:43:48.249218 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af443b11-a32c-48c5-8dfe-88dd94e43608" path="/var/lib/kubelet/pods/af443b11-a32c-48c5-8dfe-88dd94e43608/volumes" Dec 04 17:43:49 crc kubenswrapper[4631]: I1204 17:43:49.998171 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-8pp22"] Dec 04 17:43:49 crc kubenswrapper[4631]: E1204 17:43:49.999663 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af443b11-a32c-48c5-8dfe-88dd94e43608" containerName="registry-server" Dec 04 17:43:49 crc kubenswrapper[4631]: I1204 17:43:49.999738 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="af443b11-a32c-48c5-8dfe-88dd94e43608" containerName="registry-server" Dec 04 17:43:49 crc kubenswrapper[4631]: E1204 17:43:49.999795 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af443b11-a32c-48c5-8dfe-88dd94e43608" containerName="extract-utilities" Dec 04 17:43:49 crc kubenswrapper[4631]: I1204 17:43:49.999848 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="af443b11-a32c-48c5-8dfe-88dd94e43608" containerName="extract-utilities" Dec 04 17:43:49 crc kubenswrapper[4631]: E1204 17:43:49.999910 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af443b11-a32c-48c5-8dfe-88dd94e43608" containerName="extract-content" Dec 04 17:43:49 crc kubenswrapper[4631]: I1204 17:43:49.999963 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="af443b11-a32c-48c5-8dfe-88dd94e43608" containerName="extract-content" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.000204 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="af443b11-a32c-48c5-8dfe-88dd94e43608" containerName="registry-server" Dec 04 17:43:50 
crc kubenswrapper[4631]: I1204 17:43:50.000935 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8pp22" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.004519 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-md7vc" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.011730 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kd9nq"] Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.012506 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kd9nq" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.014959 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-8pp22"] Dec 04 17:43:50 crc kubenswrapper[4631]: W1204 17:43:50.015398 4631 reflector.go:561] object-"openshift-nmstate"/"openshift-nmstate-webhook": failed to list *v1.Secret: secrets "openshift-nmstate-webhook" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-nmstate": no relationship found between node 'crc' and this object Dec 04 17:43:50 crc kubenswrapper[4631]: E1204 17:43:50.015442 4631 reflector.go:158] "Unhandled Error" err="object-\"openshift-nmstate\"/\"openshift-nmstate-webhook\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-nmstate-webhook\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-nmstate\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.036251 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-dqwmp"] Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.037004 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-dqwmp" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.071342 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kd9nq"] Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.166950 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-5cbf9"] Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.167657 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-5cbf9" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.169262 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.175904 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9-ovs-socket\") pod \"nmstate-handler-dqwmp\" (UID: \"a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9\") " pod="openshift-nmstate/nmstate-handler-dqwmp" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.175951 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9-dbus-socket\") pod \"nmstate-handler-dqwmp\" (UID: \"a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9\") " pod="openshift-nmstate/nmstate-handler-dqwmp" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.175977 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4spc\" (UniqueName: \"kubernetes.io/projected/a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9-kube-api-access-j4spc\") pod \"nmstate-handler-dqwmp\" (UID: \"a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9\") " pod="openshift-nmstate/nmstate-handler-dqwmp" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.176444 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twpsp\" (UniqueName: \"kubernetes.io/projected/6af82021-e54a-415b-963a-3e0ca6f7fd5c-kube-api-access-twpsp\") pod \"nmstate-webhook-5f6d4c5ccb-kd9nq\" (UID: \"6af82021-e54a-415b-963a-3e0ca6f7fd5c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kd9nq" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.176556 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9-nmstate-lock\") pod \"nmstate-handler-dqwmp\" (UID: \"a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9\") " pod="openshift-nmstate/nmstate-handler-dqwmp" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.176646 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pp5c7\" (UniqueName: \"kubernetes.io/projected/061c2acd-2d43-420a-8c0f-d31fcd0b2d3e-kube-api-access-pp5c7\") pod \"nmstate-metrics-7f946cbc9-8pp22\" (UID: \"061c2acd-2d43-420a-8c0f-d31fcd0b2d3e\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8pp22" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.176735 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/6af82021-e54a-415b-963a-3e0ca6f7fd5c-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-kd9nq\" (UID: \"6af82021-e54a-415b-963a-3e0ca6f7fd5c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kd9nq" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.178989 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.179821 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-5cbf9"] Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 
17:43:50.186713 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-2mlpc" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.277435 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9-dbus-socket\") pod \"nmstate-handler-dqwmp\" (UID: \"a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9\") " pod="openshift-nmstate/nmstate-handler-dqwmp" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.277472 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4spc\" (UniqueName: \"kubernetes.io/projected/a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9-kube-api-access-j4spc\") pod \"nmstate-handler-dqwmp\" (UID: \"a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9\") " pod="openshift-nmstate/nmstate-handler-dqwmp" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.277517 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twpsp\" (UniqueName: \"kubernetes.io/projected/6af82021-e54a-415b-963a-3e0ca6f7fd5c-kube-api-access-twpsp\") pod \"nmstate-webhook-5f6d4c5ccb-kd9nq\" (UID: \"6af82021-e54a-415b-963a-3e0ca6f7fd5c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kd9nq" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.277540 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnm7c\" (UniqueName: \"kubernetes.io/projected/1bfeccd7-32aa-4315-96df-4d7df3f10767-kube-api-access-jnm7c\") pod \"nmstate-console-plugin-7fbb5f6569-5cbf9\" (UID: \"1bfeccd7-32aa-4315-96df-4d7df3f10767\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-5cbf9" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.277560 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9-nmstate-lock\") pod \"nmstate-handler-dqwmp\" (UID: \"a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9\") " pod="openshift-nmstate/nmstate-handler-dqwmp" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.277586 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pp5c7\" (UniqueName: \"kubernetes.io/projected/061c2acd-2d43-420a-8c0f-d31fcd0b2d3e-kube-api-access-pp5c7\") pod \"nmstate-metrics-7f946cbc9-8pp22\" (UID: \"061c2acd-2d43-420a-8c0f-d31fcd0b2d3e\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8pp22" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.277603 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/6af82021-e54a-415b-963a-3e0ca6f7fd5c-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-kd9nq\" (UID: \"6af82021-e54a-415b-963a-3e0ca6f7fd5c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kd9nq" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.277618 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/1bfeccd7-32aa-4315-96df-4d7df3f10767-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-5cbf9\" (UID: \"1bfeccd7-32aa-4315-96df-4d7df3f10767\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-5cbf9" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.277634 4631 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/1bfeccd7-32aa-4315-96df-4d7df3f10767-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-5cbf9\" (UID: \"1bfeccd7-32aa-4315-96df-4d7df3f10767\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-5cbf9" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.277648 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9-ovs-socket\") pod \"nmstate-handler-dqwmp\" (UID: \"a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9\") " pod="openshift-nmstate/nmstate-handler-dqwmp" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.277705 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9-ovs-socket\") pod \"nmstate-handler-dqwmp\" (UID: \"a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9\") " pod="openshift-nmstate/nmstate-handler-dqwmp" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.277735 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9-nmstate-lock\") pod \"nmstate-handler-dqwmp\" (UID: \"a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9\") " pod="openshift-nmstate/nmstate-handler-dqwmp" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.277930 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9-dbus-socket\") pod \"nmstate-handler-dqwmp\" (UID: \"a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9\") " pod="openshift-nmstate/nmstate-handler-dqwmp" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.303643 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twpsp\" (UniqueName: \"kubernetes.io/projected/6af82021-e54a-415b-963a-3e0ca6f7fd5c-kube-api-access-twpsp\") pod \"nmstate-webhook-5f6d4c5ccb-kd9nq\" (UID: \"6af82021-e54a-415b-963a-3e0ca6f7fd5c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kd9nq" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.303660 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4spc\" (UniqueName: \"kubernetes.io/projected/a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9-kube-api-access-j4spc\") pod \"nmstate-handler-dqwmp\" (UID: \"a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9\") " pod="openshift-nmstate/nmstate-handler-dqwmp" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.310527 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pp5c7\" (UniqueName: \"kubernetes.io/projected/061c2acd-2d43-420a-8c0f-d31fcd0b2d3e-kube-api-access-pp5c7\") pod \"nmstate-metrics-7f946cbc9-8pp22\" (UID: \"061c2acd-2d43-420a-8c0f-d31fcd0b2d3e\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8pp22" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.321874 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-md7vc" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.331539 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8pp22" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.357918 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-dqwmp" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.378312 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnm7c\" (UniqueName: \"kubernetes.io/projected/1bfeccd7-32aa-4315-96df-4d7df3f10767-kube-api-access-jnm7c\") pod \"nmstate-console-plugin-7fbb5f6569-5cbf9\" (UID: \"1bfeccd7-32aa-4315-96df-4d7df3f10767\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-5cbf9" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.378898 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/1bfeccd7-32aa-4315-96df-4d7df3f10767-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-5cbf9\" (UID: \"1bfeccd7-32aa-4315-96df-4d7df3f10767\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-5cbf9" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.378930 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/1bfeccd7-32aa-4315-96df-4d7df3f10767-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-5cbf9\" (UID: \"1bfeccd7-32aa-4315-96df-4d7df3f10767\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-5cbf9" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.382144 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.382490 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.388321 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-7cf5f9d788-ssdrd"] Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.389282 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.397021 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/1bfeccd7-32aa-4315-96df-4d7df3f10767-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-5cbf9\" (UID: \"1bfeccd7-32aa-4315-96df-4d7df3f10767\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-5cbf9" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.397051 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/1bfeccd7-32aa-4315-96df-4d7df3f10767-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-5cbf9\" (UID: \"1bfeccd7-32aa-4315-96df-4d7df3f10767\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-5cbf9" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.407144 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnm7c\" (UniqueName: \"kubernetes.io/projected/1bfeccd7-32aa-4315-96df-4d7df3f10767-kube-api-access-jnm7c\") pod \"nmstate-console-plugin-7fbb5f6569-5cbf9\" (UID: \"1bfeccd7-32aa-4315-96df-4d7df3f10767\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-5cbf9" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.413444 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7cf5f9d788-ssdrd"] Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.479887 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e48cc0ec-4318-4cad-b4e6-5faa7384c41f-console-config\") pod \"console-7cf5f9d788-ssdrd\" (UID: \"e48cc0ec-4318-4cad-b4e6-5faa7384c41f\") " pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.479931 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-db6w7\" (UniqueName: \"kubernetes.io/projected/e48cc0ec-4318-4cad-b4e6-5faa7384c41f-kube-api-access-db6w7\") pod \"console-7cf5f9d788-ssdrd\" (UID: \"e48cc0ec-4318-4cad-b4e6-5faa7384c41f\") " pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.479974 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e48cc0ec-4318-4cad-b4e6-5faa7384c41f-console-oauth-config\") pod \"console-7cf5f9d788-ssdrd\" (UID: \"e48cc0ec-4318-4cad-b4e6-5faa7384c41f\") " pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.480018 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e48cc0ec-4318-4cad-b4e6-5faa7384c41f-oauth-serving-cert\") pod \"console-7cf5f9d788-ssdrd\" (UID: \"e48cc0ec-4318-4cad-b4e6-5faa7384c41f\") " pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.480033 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e48cc0ec-4318-4cad-b4e6-5faa7384c41f-trusted-ca-bundle\") pod \"console-7cf5f9d788-ssdrd\" (UID: \"e48cc0ec-4318-4cad-b4e6-5faa7384c41f\") " 
pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.480054 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e48cc0ec-4318-4cad-b4e6-5faa7384c41f-service-ca\") pod \"console-7cf5f9d788-ssdrd\" (UID: \"e48cc0ec-4318-4cad-b4e6-5faa7384c41f\") " pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.480082 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e48cc0ec-4318-4cad-b4e6-5faa7384c41f-console-serving-cert\") pod \"console-7cf5f9d788-ssdrd\" (UID: \"e48cc0ec-4318-4cad-b4e6-5faa7384c41f\") " pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.486785 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-2mlpc" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.494653 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-5cbf9" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.580416 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e48cc0ec-4318-4cad-b4e6-5faa7384c41f-console-oauth-config\") pod \"console-7cf5f9d788-ssdrd\" (UID: \"e48cc0ec-4318-4cad-b4e6-5faa7384c41f\") " pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.580478 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e48cc0ec-4318-4cad-b4e6-5faa7384c41f-oauth-serving-cert\") pod \"console-7cf5f9d788-ssdrd\" (UID: \"e48cc0ec-4318-4cad-b4e6-5faa7384c41f\") " pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.580493 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e48cc0ec-4318-4cad-b4e6-5faa7384c41f-trusted-ca-bundle\") pod \"console-7cf5f9d788-ssdrd\" (UID: \"e48cc0ec-4318-4cad-b4e6-5faa7384c41f\") " pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.580516 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e48cc0ec-4318-4cad-b4e6-5faa7384c41f-service-ca\") pod \"console-7cf5f9d788-ssdrd\" (UID: \"e48cc0ec-4318-4cad-b4e6-5faa7384c41f\") " pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.580544 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e48cc0ec-4318-4cad-b4e6-5faa7384c41f-console-serving-cert\") pod \"console-7cf5f9d788-ssdrd\" (UID: \"e48cc0ec-4318-4cad-b4e6-5faa7384c41f\") " pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.580561 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e48cc0ec-4318-4cad-b4e6-5faa7384c41f-console-config\") pod \"console-7cf5f9d788-ssdrd\" (UID: \"e48cc0ec-4318-4cad-b4e6-5faa7384c41f\") " 
pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.580588 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-db6w7\" (UniqueName: \"kubernetes.io/projected/e48cc0ec-4318-4cad-b4e6-5faa7384c41f-kube-api-access-db6w7\") pod \"console-7cf5f9d788-ssdrd\" (UID: \"e48cc0ec-4318-4cad-b4e6-5faa7384c41f\") " pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.582105 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e48cc0ec-4318-4cad-b4e6-5faa7384c41f-trusted-ca-bundle\") pod \"console-7cf5f9d788-ssdrd\" (UID: \"e48cc0ec-4318-4cad-b4e6-5faa7384c41f\") " pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.582427 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e48cc0ec-4318-4cad-b4e6-5faa7384c41f-service-ca\") pod \"console-7cf5f9d788-ssdrd\" (UID: \"e48cc0ec-4318-4cad-b4e6-5faa7384c41f\") " pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.582651 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e48cc0ec-4318-4cad-b4e6-5faa7384c41f-console-config\") pod \"console-7cf5f9d788-ssdrd\" (UID: \"e48cc0ec-4318-4cad-b4e6-5faa7384c41f\") " pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.582955 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e48cc0ec-4318-4cad-b4e6-5faa7384c41f-oauth-serving-cert\") pod \"console-7cf5f9d788-ssdrd\" (UID: \"e48cc0ec-4318-4cad-b4e6-5faa7384c41f\") " pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.587334 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e48cc0ec-4318-4cad-b4e6-5faa7384c41f-console-serving-cert\") pod \"console-7cf5f9d788-ssdrd\" (UID: \"e48cc0ec-4318-4cad-b4e6-5faa7384c41f\") " pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.588215 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e48cc0ec-4318-4cad-b4e6-5faa7384c41f-console-oauth-config\") pod \"console-7cf5f9d788-ssdrd\" (UID: \"e48cc0ec-4318-4cad-b4e6-5faa7384c41f\") " pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.596162 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-8pp22"] Dec 04 17:43:50 crc kubenswrapper[4631]: W1204 17:43:50.606714 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod061c2acd_2d43_420a_8c0f_d31fcd0b2d3e.slice/crio-d368d5d162f208faf17205ac32b92cb1d50268296d9276482d785f305cbd0393 WatchSource:0}: Error finding container d368d5d162f208faf17205ac32b92cb1d50268296d9276482d785f305cbd0393: Status 404 returned error can't find the container with id d368d5d162f208faf17205ac32b92cb1d50268296d9276482d785f305cbd0393 Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.612024 4631 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-db6w7\" (UniqueName: \"kubernetes.io/projected/e48cc0ec-4318-4cad-b4e6-5faa7384c41f-kube-api-access-db6w7\") pod \"console-7cf5f9d788-ssdrd\" (UID: \"e48cc0ec-4318-4cad-b4e6-5faa7384c41f\") " pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.695985 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-5cbf9"] Dec 04 17:43:50 crc kubenswrapper[4631]: W1204 17:43:50.696584 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1bfeccd7_32aa_4315_96df_4d7df3f10767.slice/crio-28f4ffe7c85be9b917794b3476ffa8a98e4629f70f3dcbfafcfb38d383529e61 WatchSource:0}: Error finding container 28f4ffe7c85be9b917794b3476ffa8a98e4629f70f3dcbfafcfb38d383529e61: Status 404 returned error can't find the container with id 28f4ffe7c85be9b917794b3476ffa8a98e4629f70f3dcbfafcfb38d383529e61 Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.745980 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:43:50 crc kubenswrapper[4631]: I1204 17:43:50.904053 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7cf5f9d788-ssdrd"] Dec 04 17:43:51 crc kubenswrapper[4631]: I1204 17:43:51.198278 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-5cbf9" event={"ID":"1bfeccd7-32aa-4315-96df-4d7df3f10767","Type":"ContainerStarted","Data":"28f4ffe7c85be9b917794b3476ffa8a98e4629f70f3dcbfafcfb38d383529e61"} Dec 04 17:43:51 crc kubenswrapper[4631]: I1204 17:43:51.200828 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-dqwmp" event={"ID":"a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9","Type":"ContainerStarted","Data":"810e13f85a9f3cbf66d90634e7ed184abbaaa341b944c586bd83a2af32efdbe8"} Dec 04 17:43:51 crc kubenswrapper[4631]: I1204 17:43:51.201826 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7cf5f9d788-ssdrd" event={"ID":"e48cc0ec-4318-4cad-b4e6-5faa7384c41f","Type":"ContainerStarted","Data":"d2dc4ae0856fb457616b46a43c0a3ed75b248d7aaa9d538a73226edd51564b8f"} Dec 04 17:43:51 crc kubenswrapper[4631]: I1204 17:43:51.202784 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8pp22" event={"ID":"061c2acd-2d43-420a-8c0f-d31fcd0b2d3e","Type":"ContainerStarted","Data":"d368d5d162f208faf17205ac32b92cb1d50268296d9276482d785f305cbd0393"} Dec 04 17:43:51 crc kubenswrapper[4631]: E1204 17:43:51.278724 4631 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: failed to sync secret cache: timed out waiting for the condition Dec 04 17:43:51 crc kubenswrapper[4631]: E1204 17:43:51.278811 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6af82021-e54a-415b-963a-3e0ca6f7fd5c-tls-key-pair podName:6af82021-e54a-415b-963a-3e0ca6f7fd5c nodeName:}" failed. No retries permitted until 2025-12-04 17:43:51.778792704 +0000 UTC m=+961.811034702 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/6af82021-e54a-415b-963a-3e0ca6f7fd5c-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-kd9nq" (UID: "6af82021-e54a-415b-963a-3e0ca6f7fd5c") : failed to sync secret cache: timed out waiting for the condition Dec 04 17:43:51 crc kubenswrapper[4631]: I1204 17:43:51.546282 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 04 17:43:51 crc kubenswrapper[4631]: I1204 17:43:51.797753 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/6af82021-e54a-415b-963a-3e0ca6f7fd5c-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-kd9nq\" (UID: \"6af82021-e54a-415b-963a-3e0ca6f7fd5c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kd9nq" Dec 04 17:43:51 crc kubenswrapper[4631]: I1204 17:43:51.804358 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/6af82021-e54a-415b-963a-3e0ca6f7fd5c-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-kd9nq\" (UID: \"6af82021-e54a-415b-963a-3e0ca6f7fd5c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kd9nq" Dec 04 17:43:51 crc kubenswrapper[4631]: I1204 17:43:51.831181 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kd9nq" Dec 04 17:43:52 crc kubenswrapper[4631]: I1204 17:43:52.087116 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kd9nq"] Dec 04 17:43:52 crc kubenswrapper[4631]: W1204 17:43:52.097678 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6af82021_e54a_415b_963a_3e0ca6f7fd5c.slice/crio-8148c1af37b76f90af6484ba3b6c0e6a141e2874737e7d5036eac919e937bc33 WatchSource:0}: Error finding container 8148c1af37b76f90af6484ba3b6c0e6a141e2874737e7d5036eac919e937bc33: Status 404 returned error can't find the container with id 8148c1af37b76f90af6484ba3b6c0e6a141e2874737e7d5036eac919e937bc33 Dec 04 17:43:52 crc kubenswrapper[4631]: I1204 17:43:52.209889 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kd9nq" event={"ID":"6af82021-e54a-415b-963a-3e0ca6f7fd5c","Type":"ContainerStarted","Data":"8148c1af37b76f90af6484ba3b6c0e6a141e2874737e7d5036eac919e937bc33"} Dec 04 17:43:52 crc kubenswrapper[4631]: I1204 17:43:52.211622 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7cf5f9d788-ssdrd" event={"ID":"e48cc0ec-4318-4cad-b4e6-5faa7384c41f","Type":"ContainerStarted","Data":"17612331783a306cefa1594e1a5eb447ee04e6a3dd6e42cec0b8ea6cb754fc96"} Dec 04 17:43:52 crc kubenswrapper[4631]: I1204 17:43:52.231572 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-7cf5f9d788-ssdrd" podStartSLOduration=2.231550299 podStartE2EDuration="2.231550299s" podCreationTimestamp="2025-12-04 17:43:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:43:52.228694438 +0000 UTC m=+962.260936436" watchObservedRunningTime="2025-12-04 17:43:52.231550299 +0000 UTC m=+962.263792297" Dec 04 17:43:52 crc kubenswrapper[4631]: I1204 17:43:52.999101 4631 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-marketplace-8h6s7"] Dec 04 17:43:53 crc kubenswrapper[4631]: I1204 17:43:53.000585 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8h6s7" Dec 04 17:43:53 crc kubenswrapper[4631]: I1204 17:43:53.051085 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8h6s7"] Dec 04 17:43:53 crc kubenswrapper[4631]: I1204 17:43:53.114710 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m88vm\" (UniqueName: \"kubernetes.io/projected/227a66ca-dced-40a0-86ae-34ebe6984e6d-kube-api-access-m88vm\") pod \"redhat-marketplace-8h6s7\" (UID: \"227a66ca-dced-40a0-86ae-34ebe6984e6d\") " pod="openshift-marketplace/redhat-marketplace-8h6s7" Dec 04 17:43:53 crc kubenswrapper[4631]: I1204 17:43:53.114781 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/227a66ca-dced-40a0-86ae-34ebe6984e6d-utilities\") pod \"redhat-marketplace-8h6s7\" (UID: \"227a66ca-dced-40a0-86ae-34ebe6984e6d\") " pod="openshift-marketplace/redhat-marketplace-8h6s7" Dec 04 17:43:53 crc kubenswrapper[4631]: I1204 17:43:53.114841 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/227a66ca-dced-40a0-86ae-34ebe6984e6d-catalog-content\") pod \"redhat-marketplace-8h6s7\" (UID: \"227a66ca-dced-40a0-86ae-34ebe6984e6d\") " pod="openshift-marketplace/redhat-marketplace-8h6s7" Dec 04 17:43:53 crc kubenswrapper[4631]: I1204 17:43:53.216537 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/227a66ca-dced-40a0-86ae-34ebe6984e6d-utilities\") pod \"redhat-marketplace-8h6s7\" (UID: \"227a66ca-dced-40a0-86ae-34ebe6984e6d\") " pod="openshift-marketplace/redhat-marketplace-8h6s7" Dec 04 17:43:53 crc kubenswrapper[4631]: I1204 17:43:53.216599 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/227a66ca-dced-40a0-86ae-34ebe6984e6d-catalog-content\") pod \"redhat-marketplace-8h6s7\" (UID: \"227a66ca-dced-40a0-86ae-34ebe6984e6d\") " pod="openshift-marketplace/redhat-marketplace-8h6s7" Dec 04 17:43:53 crc kubenswrapper[4631]: I1204 17:43:53.216641 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m88vm\" (UniqueName: \"kubernetes.io/projected/227a66ca-dced-40a0-86ae-34ebe6984e6d-kube-api-access-m88vm\") pod \"redhat-marketplace-8h6s7\" (UID: \"227a66ca-dced-40a0-86ae-34ebe6984e6d\") " pod="openshift-marketplace/redhat-marketplace-8h6s7" Dec 04 17:43:53 crc kubenswrapper[4631]: I1204 17:43:53.217319 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/227a66ca-dced-40a0-86ae-34ebe6984e6d-utilities\") pod \"redhat-marketplace-8h6s7\" (UID: \"227a66ca-dced-40a0-86ae-34ebe6984e6d\") " pod="openshift-marketplace/redhat-marketplace-8h6s7" Dec 04 17:43:53 crc kubenswrapper[4631]: I1204 17:43:53.217568 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/227a66ca-dced-40a0-86ae-34ebe6984e6d-catalog-content\") pod \"redhat-marketplace-8h6s7\" (UID: \"227a66ca-dced-40a0-86ae-34ebe6984e6d\") 
" pod="openshift-marketplace/redhat-marketplace-8h6s7" Dec 04 17:43:53 crc kubenswrapper[4631]: I1204 17:43:53.241311 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m88vm\" (UniqueName: \"kubernetes.io/projected/227a66ca-dced-40a0-86ae-34ebe6984e6d-kube-api-access-m88vm\") pod \"redhat-marketplace-8h6s7\" (UID: \"227a66ca-dced-40a0-86ae-34ebe6984e6d\") " pod="openshift-marketplace/redhat-marketplace-8h6s7" Dec 04 17:43:53 crc kubenswrapper[4631]: I1204 17:43:53.384745 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8h6s7" Dec 04 17:43:54 crc kubenswrapper[4631]: I1204 17:43:54.537248 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8h6s7"] Dec 04 17:43:54 crc kubenswrapper[4631]: W1204 17:43:54.545537 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod227a66ca_dced_40a0_86ae_34ebe6984e6d.slice/crio-f78a27b2eacbfcb792e59f462912a4e3def3776e90b1c2f6d9c0a6cc5cfb1413 WatchSource:0}: Error finding container f78a27b2eacbfcb792e59f462912a4e3def3776e90b1c2f6d9c0a6cc5cfb1413: Status 404 returned error can't find the container with id f78a27b2eacbfcb792e59f462912a4e3def3776e90b1c2f6d9c0a6cc5cfb1413 Dec 04 17:43:55 crc kubenswrapper[4631]: I1204 17:43:55.241544 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-5cbf9" event={"ID":"1bfeccd7-32aa-4315-96df-4d7df3f10767","Type":"ContainerStarted","Data":"adb9ebfac7afbf2a5f2b227697f7aef589eb98d460cdc78a0cae4361f2421c1b"} Dec 04 17:43:55 crc kubenswrapper[4631]: I1204 17:43:55.244017 4631 generic.go:334] "Generic (PLEG): container finished" podID="227a66ca-dced-40a0-86ae-34ebe6984e6d" containerID="7235134814c881827794ad5d6ec78d315e96117c276091aacb4f4e2f756a51a3" exitCode=0 Dec 04 17:43:55 crc kubenswrapper[4631]: I1204 17:43:55.244331 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8h6s7" event={"ID":"227a66ca-dced-40a0-86ae-34ebe6984e6d","Type":"ContainerDied","Data":"7235134814c881827794ad5d6ec78d315e96117c276091aacb4f4e2f756a51a3"} Dec 04 17:43:55 crc kubenswrapper[4631]: I1204 17:43:55.244396 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8h6s7" event={"ID":"227a66ca-dced-40a0-86ae-34ebe6984e6d","Type":"ContainerStarted","Data":"f78a27b2eacbfcb792e59f462912a4e3def3776e90b1c2f6d9c0a6cc5cfb1413"} Dec 04 17:43:55 crc kubenswrapper[4631]: I1204 17:43:55.249002 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-dqwmp" event={"ID":"a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9","Type":"ContainerStarted","Data":"8a829a283136df5e186acfad316aab63d829ed363ce7805b86368e5877b9a277"} Dec 04 17:43:55 crc kubenswrapper[4631]: I1204 17:43:55.249180 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-dqwmp" Dec 04 17:43:55 crc kubenswrapper[4631]: I1204 17:43:55.253081 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kd9nq" event={"ID":"6af82021-e54a-415b-963a-3e0ca6f7fd5c","Type":"ContainerStarted","Data":"3e1a55ad0ac6ba643c64967d8e7b45a4fdd8242e156ff43f9d54e506e6292bc0"} Dec 04 17:43:55 crc kubenswrapper[4631]: I1204 17:43:55.253273 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kd9nq" Dec 04 17:43:55 crc kubenswrapper[4631]: I1204 17:43:55.258674 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8pp22" event={"ID":"061c2acd-2d43-420a-8c0f-d31fcd0b2d3e","Type":"ContainerStarted","Data":"126614ce0cbccb58f2e4136bb0ce909f1f0b1f3998de43e6a360536dc0d8e1ae"} Dec 04 17:43:55 crc kubenswrapper[4631]: I1204 17:43:55.268233 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-5cbf9" podStartSLOduration=1.868978141 podStartE2EDuration="5.268209716s" podCreationTimestamp="2025-12-04 17:43:50 +0000 UTC" firstStartedPulling="2025-12-04 17:43:50.698604712 +0000 UTC m=+960.730846700" lastFinishedPulling="2025-12-04 17:43:54.097836267 +0000 UTC m=+964.130078275" observedRunningTime="2025-12-04 17:43:55.255235419 +0000 UTC m=+965.287477427" watchObservedRunningTime="2025-12-04 17:43:55.268209716 +0000 UTC m=+965.300451724" Dec 04 17:43:55 crc kubenswrapper[4631]: I1204 17:43:55.281870 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kd9nq" podStartSLOduration=4.284666496 podStartE2EDuration="6.281846392s" podCreationTimestamp="2025-12-04 17:43:49 +0000 UTC" firstStartedPulling="2025-12-04 17:43:52.101762513 +0000 UTC m=+962.134004511" lastFinishedPulling="2025-12-04 17:43:54.098942409 +0000 UTC m=+964.131184407" observedRunningTime="2025-12-04 17:43:55.275946285 +0000 UTC m=+965.308188283" watchObservedRunningTime="2025-12-04 17:43:55.281846392 +0000 UTC m=+965.314088400" Dec 04 17:43:55 crc kubenswrapper[4631]: I1204 17:43:55.324418 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-dqwmp" podStartSLOduration=1.657456119 podStartE2EDuration="5.324395437s" podCreationTimestamp="2025-12-04 17:43:50 +0000 UTC" firstStartedPulling="2025-12-04 17:43:50.430852178 +0000 UTC m=+960.463094176" lastFinishedPulling="2025-12-04 17:43:54.097791486 +0000 UTC m=+964.130033494" observedRunningTime="2025-12-04 17:43:55.318337686 +0000 UTC m=+965.350579684" watchObservedRunningTime="2025-12-04 17:43:55.324395437 +0000 UTC m=+965.356637435" Dec 04 17:43:57 crc kubenswrapper[4631]: I1204 17:43:57.276462 4631 generic.go:334] "Generic (PLEG): container finished" podID="227a66ca-dced-40a0-86ae-34ebe6984e6d" containerID="cc7d3d9336969965848727137b3343b5474c3faa3f2c612c7a09bd4c530f6bd3" exitCode=0 Dec 04 17:43:57 crc kubenswrapper[4631]: I1204 17:43:57.276544 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8h6s7" event={"ID":"227a66ca-dced-40a0-86ae-34ebe6984e6d","Type":"ContainerDied","Data":"cc7d3d9336969965848727137b3343b5474c3faa3f2c612c7a09bd4c530f6bd3"} Dec 04 17:43:57 crc kubenswrapper[4631]: I1204 17:43:57.280693 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8pp22" event={"ID":"061c2acd-2d43-420a-8c0f-d31fcd0b2d3e","Type":"ContainerStarted","Data":"8d1848c8b49a877d367be1bcf46bb12961df008ac0cdef48c5f0da5a10efb554"} Dec 04 17:43:57 crc kubenswrapper[4631]: I1204 17:43:57.349858 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8pp22" podStartSLOduration=2.395554751 podStartE2EDuration="8.349823233s" podCreationTimestamp="2025-12-04 17:43:49 +0000 UTC" firstStartedPulling="2025-12-04 17:43:50.611354401 +0000 
UTC m=+960.643596399" lastFinishedPulling="2025-12-04 17:43:56.565622873 +0000 UTC m=+966.597864881" observedRunningTime="2025-12-04 17:43:57.344533953 +0000 UTC m=+967.376775991" watchObservedRunningTime="2025-12-04 17:43:57.349823233 +0000 UTC m=+967.382065261" Dec 04 17:43:58 crc kubenswrapper[4631]: I1204 17:43:58.290747 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8h6s7" event={"ID":"227a66ca-dced-40a0-86ae-34ebe6984e6d","Type":"ContainerStarted","Data":"ee1b3323b706897b96ea9bbbe135af77508ef203c219106b289719e2ebc75e7f"} Dec 04 17:43:58 crc kubenswrapper[4631]: I1204 17:43:58.315440 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8h6s7" podStartSLOduration=3.808814027 podStartE2EDuration="6.315416331s" podCreationTimestamp="2025-12-04 17:43:52 +0000 UTC" firstStartedPulling="2025-12-04 17:43:55.245303987 +0000 UTC m=+965.277545985" lastFinishedPulling="2025-12-04 17:43:57.751906281 +0000 UTC m=+967.784148289" observedRunningTime="2025-12-04 17:43:58.310575454 +0000 UTC m=+968.342817462" watchObservedRunningTime="2025-12-04 17:43:58.315416331 +0000 UTC m=+968.347658349" Dec 04 17:44:00 crc kubenswrapper[4631]: I1204 17:44:00.384149 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-dqwmp" Dec 04 17:44:00 crc kubenswrapper[4631]: I1204 17:44:00.747000 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:44:00 crc kubenswrapper[4631]: I1204 17:44:00.747391 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:44:00 crc kubenswrapper[4631]: I1204 17:44:00.752073 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:44:01 crc kubenswrapper[4631]: I1204 17:44:01.311624 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-7cf5f9d788-ssdrd" Dec 04 17:44:01 crc kubenswrapper[4631]: I1204 17:44:01.361833 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-kl45g"] Dec 04 17:44:03 crc kubenswrapper[4631]: I1204 17:44:03.385003 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8h6s7" Dec 04 17:44:03 crc kubenswrapper[4631]: I1204 17:44:03.385553 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8h6s7" Dec 04 17:44:03 crc kubenswrapper[4631]: I1204 17:44:03.433656 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8h6s7" Dec 04 17:44:04 crc kubenswrapper[4631]: I1204 17:44:04.367753 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8h6s7" Dec 04 17:44:04 crc kubenswrapper[4631]: I1204 17:44:04.409213 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8h6s7"] Dec 04 17:44:06 crc kubenswrapper[4631]: I1204 17:44:06.022926 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: 
connect: connection refused" start-of-body= Dec 04 17:44:06 crc kubenswrapper[4631]: I1204 17:44:06.023304 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 17:44:06 crc kubenswrapper[4631]: I1204 17:44:06.339175 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8h6s7" podUID="227a66ca-dced-40a0-86ae-34ebe6984e6d" containerName="registry-server" containerID="cri-o://ee1b3323b706897b96ea9bbbe135af77508ef203c219106b289719e2ebc75e7f" gracePeriod=2 Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.245267 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8h6s7" Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.347831 4631 generic.go:334] "Generic (PLEG): container finished" podID="227a66ca-dced-40a0-86ae-34ebe6984e6d" containerID="ee1b3323b706897b96ea9bbbe135af77508ef203c219106b289719e2ebc75e7f" exitCode=0 Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.347889 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8h6s7" event={"ID":"227a66ca-dced-40a0-86ae-34ebe6984e6d","Type":"ContainerDied","Data":"ee1b3323b706897b96ea9bbbe135af77508ef203c219106b289719e2ebc75e7f"} Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.347918 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8h6s7" event={"ID":"227a66ca-dced-40a0-86ae-34ebe6984e6d","Type":"ContainerDied","Data":"f78a27b2eacbfcb792e59f462912a4e3def3776e90b1c2f6d9c0a6cc5cfb1413"} Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.347938 4631 scope.go:117] "RemoveContainer" containerID="ee1b3323b706897b96ea9bbbe135af77508ef203c219106b289719e2ebc75e7f" Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.348376 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8h6s7" Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.365820 4631 scope.go:117] "RemoveContainer" containerID="cc7d3d9336969965848727137b3343b5474c3faa3f2c612c7a09bd4c530f6bd3" Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.387682 4631 scope.go:117] "RemoveContainer" containerID="7235134814c881827794ad5d6ec78d315e96117c276091aacb4f4e2f756a51a3" Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.404160 4631 scope.go:117] "RemoveContainer" containerID="ee1b3323b706897b96ea9bbbe135af77508ef203c219106b289719e2ebc75e7f" Dec 04 17:44:07 crc kubenswrapper[4631]: E1204 17:44:07.404515 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee1b3323b706897b96ea9bbbe135af77508ef203c219106b289719e2ebc75e7f\": container with ID starting with ee1b3323b706897b96ea9bbbe135af77508ef203c219106b289719e2ebc75e7f not found: ID does not exist" containerID="ee1b3323b706897b96ea9bbbe135af77508ef203c219106b289719e2ebc75e7f" Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.404545 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee1b3323b706897b96ea9bbbe135af77508ef203c219106b289719e2ebc75e7f"} err="failed to get container status \"ee1b3323b706897b96ea9bbbe135af77508ef203c219106b289719e2ebc75e7f\": rpc error: code = NotFound desc = could not find container \"ee1b3323b706897b96ea9bbbe135af77508ef203c219106b289719e2ebc75e7f\": container with ID starting with ee1b3323b706897b96ea9bbbe135af77508ef203c219106b289719e2ebc75e7f not found: ID does not exist" Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.404565 4631 scope.go:117] "RemoveContainer" containerID="cc7d3d9336969965848727137b3343b5474c3faa3f2c612c7a09bd4c530f6bd3" Dec 04 17:44:07 crc kubenswrapper[4631]: E1204 17:44:07.404748 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc7d3d9336969965848727137b3343b5474c3faa3f2c612c7a09bd4c530f6bd3\": container with ID starting with cc7d3d9336969965848727137b3343b5474c3faa3f2c612c7a09bd4c530f6bd3 not found: ID does not exist" containerID="cc7d3d9336969965848727137b3343b5474c3faa3f2c612c7a09bd4c530f6bd3" Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.404762 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc7d3d9336969965848727137b3343b5474c3faa3f2c612c7a09bd4c530f6bd3"} err="failed to get container status \"cc7d3d9336969965848727137b3343b5474c3faa3f2c612c7a09bd4c530f6bd3\": rpc error: code = NotFound desc = could not find container \"cc7d3d9336969965848727137b3343b5474c3faa3f2c612c7a09bd4c530f6bd3\": container with ID starting with cc7d3d9336969965848727137b3343b5474c3faa3f2c612c7a09bd4c530f6bd3 not found: ID does not exist" Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.404773 4631 scope.go:117] "RemoveContainer" containerID="7235134814c881827794ad5d6ec78d315e96117c276091aacb4f4e2f756a51a3" Dec 04 17:44:07 crc kubenswrapper[4631]: E1204 17:44:07.404936 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7235134814c881827794ad5d6ec78d315e96117c276091aacb4f4e2f756a51a3\": container with ID starting with 7235134814c881827794ad5d6ec78d315e96117c276091aacb4f4e2f756a51a3 not found: ID does not exist" containerID="7235134814c881827794ad5d6ec78d315e96117c276091aacb4f4e2f756a51a3" 
Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.404949 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7235134814c881827794ad5d6ec78d315e96117c276091aacb4f4e2f756a51a3"} err="failed to get container status \"7235134814c881827794ad5d6ec78d315e96117c276091aacb4f4e2f756a51a3\": rpc error: code = NotFound desc = could not find container \"7235134814c881827794ad5d6ec78d315e96117c276091aacb4f4e2f756a51a3\": container with ID starting with 7235134814c881827794ad5d6ec78d315e96117c276091aacb4f4e2f756a51a3 not found: ID does not exist"
Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.424558 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/227a66ca-dced-40a0-86ae-34ebe6984e6d-utilities\") pod \"227a66ca-dced-40a0-86ae-34ebe6984e6d\" (UID: \"227a66ca-dced-40a0-86ae-34ebe6984e6d\") "
Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.424617 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/227a66ca-dced-40a0-86ae-34ebe6984e6d-catalog-content\") pod \"227a66ca-dced-40a0-86ae-34ebe6984e6d\" (UID: \"227a66ca-dced-40a0-86ae-34ebe6984e6d\") "
Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.424686 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m88vm\" (UniqueName: \"kubernetes.io/projected/227a66ca-dced-40a0-86ae-34ebe6984e6d-kube-api-access-m88vm\") pod \"227a66ca-dced-40a0-86ae-34ebe6984e6d\" (UID: \"227a66ca-dced-40a0-86ae-34ebe6984e6d\") "
Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.426454 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/227a66ca-dced-40a0-86ae-34ebe6984e6d-utilities" (OuterVolumeSpecName: "utilities") pod "227a66ca-dced-40a0-86ae-34ebe6984e6d" (UID: "227a66ca-dced-40a0-86ae-34ebe6984e6d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.434634 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/227a66ca-dced-40a0-86ae-34ebe6984e6d-kube-api-access-m88vm" (OuterVolumeSpecName: "kube-api-access-m88vm") pod "227a66ca-dced-40a0-86ae-34ebe6984e6d" (UID: "227a66ca-dced-40a0-86ae-34ebe6984e6d"). InnerVolumeSpecName "kube-api-access-m88vm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.441718 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/227a66ca-dced-40a0-86ae-34ebe6984e6d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "227a66ca-dced-40a0-86ae-34ebe6984e6d" (UID: "227a66ca-dced-40a0-86ae-34ebe6984e6d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.526199 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/227a66ca-dced-40a0-86ae-34ebe6984e6d-utilities\") on node \"crc\" DevicePath \"\""
Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.526245 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/227a66ca-dced-40a0-86ae-34ebe6984e6d-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.526261 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m88vm\" (UniqueName: \"kubernetes.io/projected/227a66ca-dced-40a0-86ae-34ebe6984e6d-kube-api-access-m88vm\") on node \"crc\" DevicePath \"\""
Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.689660 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8h6s7"]
Dec 04 17:44:07 crc kubenswrapper[4631]: I1204 17:44:07.693300 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8h6s7"]
Dec 04 17:44:08 crc kubenswrapper[4631]: I1204 17:44:08.248994 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="227a66ca-dced-40a0-86ae-34ebe6984e6d" path="/var/lib/kubelet/pods/227a66ca-dced-40a0-86ae-34ebe6984e6d/volumes"
Dec 04 17:44:11 crc kubenswrapper[4631]: I1204 17:44:11.838720 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kd9nq"
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.013079 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28"]
Dec 04 17:44:26 crc kubenswrapper[4631]: E1204 17:44:26.014192 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="227a66ca-dced-40a0-86ae-34ebe6984e6d" containerName="extract-utilities"
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.014206 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="227a66ca-dced-40a0-86ae-34ebe6984e6d" containerName="extract-utilities"
Dec 04 17:44:26 crc kubenswrapper[4631]: E1204 17:44:26.014224 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="227a66ca-dced-40a0-86ae-34ebe6984e6d" containerName="registry-server"
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.014231 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="227a66ca-dced-40a0-86ae-34ebe6984e6d" containerName="registry-server"
Dec 04 17:44:26 crc kubenswrapper[4631]: E1204 17:44:26.014241 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="227a66ca-dced-40a0-86ae-34ebe6984e6d" containerName="extract-content"
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.014248 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="227a66ca-dced-40a0-86ae-34ebe6984e6d" containerName="extract-content"
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.014340 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="227a66ca-dced-40a0-86ae-34ebe6984e6d" containerName="registry-server"
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.015103 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28"
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.016827 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.026359 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28"]
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.198723 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/104d954a-30c9-401b-8e56-817777e91f38-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28\" (UID: \"104d954a-30c9-401b-8e56-817777e91f38\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28"
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.198824 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/104d954a-30c9-401b-8e56-817777e91f38-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28\" (UID: \"104d954a-30c9-401b-8e56-817777e91f38\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28"
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.198858 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnbk2\" (UniqueName: \"kubernetes.io/projected/104d954a-30c9-401b-8e56-817777e91f38-kube-api-access-qnbk2\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28\" (UID: \"104d954a-30c9-401b-8e56-817777e91f38\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28"
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.299828 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/104d954a-30c9-401b-8e56-817777e91f38-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28\" (UID: \"104d954a-30c9-401b-8e56-817777e91f38\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28"
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.299912 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/104d954a-30c9-401b-8e56-817777e91f38-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28\" (UID: \"104d954a-30c9-401b-8e56-817777e91f38\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28"
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.299940 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnbk2\" (UniqueName: \"kubernetes.io/projected/104d954a-30c9-401b-8e56-817777e91f38-kube-api-access-qnbk2\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28\" (UID: \"104d954a-30c9-401b-8e56-817777e91f38\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28"
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.300954 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/104d954a-30c9-401b-8e56-817777e91f38-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28\" (UID: \"104d954a-30c9-401b-8e56-817777e91f38\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28"
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.301067 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/104d954a-30c9-401b-8e56-817777e91f38-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28\" (UID: \"104d954a-30c9-401b-8e56-817777e91f38\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28"
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.334747 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnbk2\" (UniqueName: \"kubernetes.io/projected/104d954a-30c9-401b-8e56-817777e91f38-kube-api-access-qnbk2\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28\" (UID: \"104d954a-30c9-401b-8e56-817777e91f38\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28"
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.373563 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28"
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.410156 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-kl45g" podUID="81559ff3-95e9-455f-9d90-46c5f1a981ce" containerName="console" containerID="cri-o://e0d1d532fa5a56c3f192ff264dd0efe882bcba977f2c11b0a5b512b51a486a9f" gracePeriod=15
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.630400 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28"]
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.781958 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-kl45g_81559ff3-95e9-455f-9d90-46c5f1a981ce/console/0.log"
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.782030 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-kl45g"
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.908910 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-oauth-serving-cert\") pod \"81559ff3-95e9-455f-9d90-46c5f1a981ce\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") "
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.909089 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/81559ff3-95e9-455f-9d90-46c5f1a981ce-console-serving-cert\") pod \"81559ff3-95e9-455f-9d90-46c5f1a981ce\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") "
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.909148 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-service-ca\") pod \"81559ff3-95e9-455f-9d90-46c5f1a981ce\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") "
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.909216 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-console-config\") pod \"81559ff3-95e9-455f-9d90-46c5f1a981ce\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") "
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.909258 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-trusted-ca-bundle\") pod \"81559ff3-95e9-455f-9d90-46c5f1a981ce\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") "
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.909334 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/81559ff3-95e9-455f-9d90-46c5f1a981ce-console-oauth-config\") pod \"81559ff3-95e9-455f-9d90-46c5f1a981ce\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") "
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.909396 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wbdv8\" (UniqueName: \"kubernetes.io/projected/81559ff3-95e9-455f-9d90-46c5f1a981ce-kube-api-access-wbdv8\") pod \"81559ff3-95e9-455f-9d90-46c5f1a981ce\" (UID: \"81559ff3-95e9-455f-9d90-46c5f1a981ce\") "
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.910934 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "81559ff3-95e9-455f-9d90-46c5f1a981ce" (UID: "81559ff3-95e9-455f-9d90-46c5f1a981ce"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.911003 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-console-config" (OuterVolumeSpecName: "console-config") pod "81559ff3-95e9-455f-9d90-46c5f1a981ce" (UID: "81559ff3-95e9-455f-9d90-46c5f1a981ce"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.911660 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "81559ff3-95e9-455f-9d90-46c5f1a981ce" (UID: "81559ff3-95e9-455f-9d90-46c5f1a981ce"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.912440 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-service-ca" (OuterVolumeSpecName: "service-ca") pod "81559ff3-95e9-455f-9d90-46c5f1a981ce" (UID: "81559ff3-95e9-455f-9d90-46c5f1a981ce"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.916459 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81559ff3-95e9-455f-9d90-46c5f1a981ce-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "81559ff3-95e9-455f-9d90-46c5f1a981ce" (UID: "81559ff3-95e9-455f-9d90-46c5f1a981ce"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.916688 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81559ff3-95e9-455f-9d90-46c5f1a981ce-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "81559ff3-95e9-455f-9d90-46c5f1a981ce" (UID: "81559ff3-95e9-455f-9d90-46c5f1a981ce"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:44:26 crc kubenswrapper[4631]: I1204 17:44:26.916884 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81559ff3-95e9-455f-9d90-46c5f1a981ce-kube-api-access-wbdv8" (OuterVolumeSpecName: "kube-api-access-wbdv8") pod "81559ff3-95e9-455f-9d90-46c5f1a981ce" (UID: "81559ff3-95e9-455f-9d90-46c5f1a981ce"). InnerVolumeSpecName "kube-api-access-wbdv8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 17:44:27 crc kubenswrapper[4631]: I1204 17:44:27.010784 4631 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-service-ca\") on node \"crc\" DevicePath \"\""
Dec 04 17:44:27 crc kubenswrapper[4631]: I1204 17:44:27.010827 4631 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-console-config\") on node \"crc\" DevicePath \"\""
Dec 04 17:44:27 crc kubenswrapper[4631]: I1204 17:44:27.010840 4631 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 04 17:44:27 crc kubenswrapper[4631]: I1204 17:44:27.010854 4631 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/81559ff3-95e9-455f-9d90-46c5f1a981ce-console-oauth-config\") on node \"crc\" DevicePath \"\""
Dec 04 17:44:27 crc kubenswrapper[4631]: I1204 17:44:27.010867 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wbdv8\" (UniqueName: \"kubernetes.io/projected/81559ff3-95e9-455f-9d90-46c5f1a981ce-kube-api-access-wbdv8\") on node \"crc\" DevicePath \"\""
Dec 04 17:44:27 crc kubenswrapper[4631]: I1204 17:44:27.010878 4631 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/81559ff3-95e9-455f-9d90-46c5f1a981ce-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 04 17:44:27 crc kubenswrapper[4631]: I1204 17:44:27.010889 4631 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/81559ff3-95e9-455f-9d90-46c5f1a981ce-console-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 04 17:44:27 crc kubenswrapper[4631]: I1204 17:44:27.475748 4631 generic.go:334] "Generic (PLEG): container finished" podID="104d954a-30c9-401b-8e56-817777e91f38" containerID="4a609d613ca214eaaed45cddd7a4b2ef7e0f4c72a2075126833463d10910a3f8" exitCode=0
Dec 04 17:44:27 crc kubenswrapper[4631]: I1204 17:44:27.475835 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28" event={"ID":"104d954a-30c9-401b-8e56-817777e91f38","Type":"ContainerDied","Data":"4a609d613ca214eaaed45cddd7a4b2ef7e0f4c72a2075126833463d10910a3f8"}
Dec 04 17:44:27 crc kubenswrapper[4631]: I1204 17:44:27.475865 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28" event={"ID":"104d954a-30c9-401b-8e56-817777e91f38","Type":"ContainerStarted","Data":"d01ac8968dfab9b9e9a145b9708de755c419572c2e121c1848cf43d632d0f055"}
Dec 04 17:44:27 crc kubenswrapper[4631]: I1204 17:44:27.480650 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-kl45g_81559ff3-95e9-455f-9d90-46c5f1a981ce/console/0.log"
Dec 04 17:44:27 crc kubenswrapper[4631]: I1204 17:44:27.480700 4631 generic.go:334] "Generic (PLEG): container finished" podID="81559ff3-95e9-455f-9d90-46c5f1a981ce" containerID="e0d1d532fa5a56c3f192ff264dd0efe882bcba977f2c11b0a5b512b51a486a9f" exitCode=2
Dec 04 17:44:27 crc kubenswrapper[4631]: I1204 17:44:27.480734 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-kl45g" event={"ID":"81559ff3-95e9-455f-9d90-46c5f1a981ce","Type":"ContainerDied","Data":"e0d1d532fa5a56c3f192ff264dd0efe882bcba977f2c11b0a5b512b51a486a9f"}
Dec 04 17:44:27 crc kubenswrapper[4631]: I1204 17:44:27.480770 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-kl45g" event={"ID":"81559ff3-95e9-455f-9d90-46c5f1a981ce","Type":"ContainerDied","Data":"9a53639931dcaff4c0dda3b62d08387b8a9ba8f62219681043ee47b784d8b829"}
Dec 04 17:44:27 crc kubenswrapper[4631]: I1204 17:44:27.480794 4631 scope.go:117] "RemoveContainer" containerID="e0d1d532fa5a56c3f192ff264dd0efe882bcba977f2c11b0a5b512b51a486a9f"
Dec 04 17:44:27 crc kubenswrapper[4631]: I1204 17:44:27.480801 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-kl45g"
Dec 04 17:44:27 crc kubenswrapper[4631]: I1204 17:44:27.500656 4631 scope.go:117] "RemoveContainer" containerID="e0d1d532fa5a56c3f192ff264dd0efe882bcba977f2c11b0a5b512b51a486a9f"
Dec 04 17:44:27 crc kubenswrapper[4631]: E1204 17:44:27.501126 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0d1d532fa5a56c3f192ff264dd0efe882bcba977f2c11b0a5b512b51a486a9f\": container with ID starting with e0d1d532fa5a56c3f192ff264dd0efe882bcba977f2c11b0a5b512b51a486a9f not found: ID does not exist" containerID="e0d1d532fa5a56c3f192ff264dd0efe882bcba977f2c11b0a5b512b51a486a9f"
Dec 04 17:44:27 crc kubenswrapper[4631]: I1204 17:44:27.501155 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0d1d532fa5a56c3f192ff264dd0efe882bcba977f2c11b0a5b512b51a486a9f"} err="failed to get container status \"e0d1d532fa5a56c3f192ff264dd0efe882bcba977f2c11b0a5b512b51a486a9f\": rpc error: code = NotFound desc = could not find container \"e0d1d532fa5a56c3f192ff264dd0efe882bcba977f2c11b0a5b512b51a486a9f\": container with ID starting with e0d1d532fa5a56c3f192ff264dd0efe882bcba977f2c11b0a5b512b51a486a9f not found: ID does not exist"
Dec 04 17:44:27 crc kubenswrapper[4631]: I1204 17:44:27.527908 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-kl45g"]
Dec 04 17:44:27 crc kubenswrapper[4631]: I1204 17:44:27.531704 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-kl45g"]
Dec 04 17:44:28 crc kubenswrapper[4631]: I1204 17:44:28.247308 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81559ff3-95e9-455f-9d90-46c5f1a981ce" path="/var/lib/kubelet/pods/81559ff3-95e9-455f-9d90-46c5f1a981ce/volumes"
Dec 04 17:44:29 crc kubenswrapper[4631]: I1204 17:44:29.499660 4631 generic.go:334] "Generic (PLEG): container finished" podID="104d954a-30c9-401b-8e56-817777e91f38" containerID="7d28e7407de36ea8c5d45d1f8afc893edb7c7bd59af008afe21d9e109f84cfd1" exitCode=0
Dec 04 17:44:29 crc kubenswrapper[4631]: I1204 17:44:29.500224 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28" event={"ID":"104d954a-30c9-401b-8e56-817777e91f38","Type":"ContainerDied","Data":"7d28e7407de36ea8c5d45d1f8afc893edb7c7bd59af008afe21d9e109f84cfd1"}
Dec 04 17:44:30 crc kubenswrapper[4631]: I1204 17:44:30.522359 4631 generic.go:334] "Generic (PLEG): container finished" podID="104d954a-30c9-401b-8e56-817777e91f38" containerID="02c22c6fe49444b2d8c076e4fdc72c6867ec3f1a5613d3c194c703a02315eec1" exitCode=0
Dec 04 17:44:30 crc kubenswrapper[4631]: I1204 17:44:30.522503 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28" event={"ID":"104d954a-30c9-401b-8e56-817777e91f38","Type":"ContainerDied","Data":"02c22c6fe49444b2d8c076e4fdc72c6867ec3f1a5613d3c194c703a02315eec1"}
Dec 04 17:44:31 crc kubenswrapper[4631]: I1204 17:44:31.751838 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28"
Dec 04 17:44:31 crc kubenswrapper[4631]: I1204 17:44:31.882793 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/104d954a-30c9-401b-8e56-817777e91f38-util\") pod \"104d954a-30c9-401b-8e56-817777e91f38\" (UID: \"104d954a-30c9-401b-8e56-817777e91f38\") "
Dec 04 17:44:31 crc kubenswrapper[4631]: I1204 17:44:31.883206 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnbk2\" (UniqueName: \"kubernetes.io/projected/104d954a-30c9-401b-8e56-817777e91f38-kube-api-access-qnbk2\") pod \"104d954a-30c9-401b-8e56-817777e91f38\" (UID: \"104d954a-30c9-401b-8e56-817777e91f38\") "
Dec 04 17:44:31 crc kubenswrapper[4631]: I1204 17:44:31.883242 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/104d954a-30c9-401b-8e56-817777e91f38-bundle\") pod \"104d954a-30c9-401b-8e56-817777e91f38\" (UID: \"104d954a-30c9-401b-8e56-817777e91f38\") "
Dec 04 17:44:31 crc kubenswrapper[4631]: I1204 17:44:31.884290 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/104d954a-30c9-401b-8e56-817777e91f38-bundle" (OuterVolumeSpecName: "bundle") pod "104d954a-30c9-401b-8e56-817777e91f38" (UID: "104d954a-30c9-401b-8e56-817777e91f38"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 17:44:31 crc kubenswrapper[4631]: I1204 17:44:31.888579 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/104d954a-30c9-401b-8e56-817777e91f38-kube-api-access-qnbk2" (OuterVolumeSpecName: "kube-api-access-qnbk2") pod "104d954a-30c9-401b-8e56-817777e91f38" (UID: "104d954a-30c9-401b-8e56-817777e91f38"). InnerVolumeSpecName "kube-api-access-qnbk2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 17:44:31 crc kubenswrapper[4631]: I1204 17:44:31.900117 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/104d954a-30c9-401b-8e56-817777e91f38-util" (OuterVolumeSpecName: "util") pod "104d954a-30c9-401b-8e56-817777e91f38" (UID: "104d954a-30c9-401b-8e56-817777e91f38"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 17:44:31 crc kubenswrapper[4631]: I1204 17:44:31.984412 4631 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/104d954a-30c9-401b-8e56-817777e91f38-bundle\") on node \"crc\" DevicePath \"\""
Dec 04 17:44:31 crc kubenswrapper[4631]: I1204 17:44:31.984445 4631 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/104d954a-30c9-401b-8e56-817777e91f38-util\") on node \"crc\" DevicePath \"\""
Dec 04 17:44:31 crc kubenswrapper[4631]: I1204 17:44:31.984457 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnbk2\" (UniqueName: \"kubernetes.io/projected/104d954a-30c9-401b-8e56-817777e91f38-kube-api-access-qnbk2\") on node \"crc\" DevicePath \"\""
Dec 04 17:44:32 crc kubenswrapper[4631]: I1204 17:44:32.535860 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28" event={"ID":"104d954a-30c9-401b-8e56-817777e91f38","Type":"ContainerDied","Data":"d01ac8968dfab9b9e9a145b9708de755c419572c2e121c1848cf43d632d0f055"}
Dec 04 17:44:32 crc kubenswrapper[4631]: I1204 17:44:32.535931 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d01ac8968dfab9b9e9a145b9708de755c419572c2e121c1848cf43d632d0f055"
Dec 04 17:44:32 crc kubenswrapper[4631]: I1204 17:44:32.536046 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28"
Dec 04 17:44:36 crc kubenswrapper[4631]: I1204 17:44:36.022643 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 17:44:36 crc kubenswrapper[4631]: I1204 17:44:36.023191 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 17:44:36 crc kubenswrapper[4631]: I1204 17:44:36.023237 4631 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q27wh"
Dec 04 17:44:36 crc kubenswrapper[4631]: I1204 17:44:36.023918 4631 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"34035c448316dbd54a5149bba9f1bb4ce2bc406518cc3e31ea4f3aeb08daaf2b"} pod="openshift-machine-config-operator/machine-config-daemon-q27wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 04 17:44:36 crc kubenswrapper[4631]: I1204 17:44:36.023967 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" containerID="cri-o://34035c448316dbd54a5149bba9f1bb4ce2bc406518cc3e31ea4f3aeb08daaf2b" gracePeriod=600
Dec 04 17:44:36 crc kubenswrapper[4631]: I1204 17:44:36.565183 4631 generic.go:334] "Generic (PLEG): container finished" podID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerID="34035c448316dbd54a5149bba9f1bb4ce2bc406518cc3e31ea4f3aeb08daaf2b" exitCode=0
Dec 04 17:44:36 crc kubenswrapper[4631]: I1204 17:44:36.565225 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerDied","Data":"34035c448316dbd54a5149bba9f1bb4ce2bc406518cc3e31ea4f3aeb08daaf2b"}
Dec 04 17:44:36 crc kubenswrapper[4631]: I1204 17:44:36.565253 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerStarted","Data":"2b743632dd2ae2acbde167ee221ee6a9a0928b1076cfc48d3d7e9758476527f1"}
Dec 04 17:44:36 crc kubenswrapper[4631]: I1204 17:44:36.565268 4631 scope.go:117] "RemoveContainer" containerID="2f45cdb997b468d85204e4185fae04daffcd45b83032693f3595260f7ec666b6"
Dec 04 17:44:39 crc kubenswrapper[4631]: I1204 17:44:39.309428 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6jntd"]
Dec 04 17:44:39 crc kubenswrapper[4631]: E1204 17:44:39.310579 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="104d954a-30c9-401b-8e56-817777e91f38" containerName="pull"
Dec 04 17:44:39 crc kubenswrapper[4631]: I1204 17:44:39.310594 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="104d954a-30c9-401b-8e56-817777e91f38" containerName="pull"
Dec 04 17:44:39 crc kubenswrapper[4631]: E1204 17:44:39.310609 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="104d954a-30c9-401b-8e56-817777e91f38" containerName="extract"
Dec 04 17:44:39 crc kubenswrapper[4631]: I1204 17:44:39.310616 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="104d954a-30c9-401b-8e56-817777e91f38" containerName="extract"
Dec 04 17:44:39 crc kubenswrapper[4631]: E1204 17:44:39.310624 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81559ff3-95e9-455f-9d90-46c5f1a981ce" containerName="console"
Dec 04 17:44:39 crc kubenswrapper[4631]: I1204 17:44:39.310631 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="81559ff3-95e9-455f-9d90-46c5f1a981ce" containerName="console"
Dec 04 17:44:39 crc kubenswrapper[4631]: E1204 17:44:39.310646 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="104d954a-30c9-401b-8e56-817777e91f38" containerName="util"
Dec 04 17:44:39 crc kubenswrapper[4631]: I1204 17:44:39.310652 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="104d954a-30c9-401b-8e56-817777e91f38" containerName="util"
Dec 04 17:44:39 crc kubenswrapper[4631]: I1204 17:44:39.310765 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="104d954a-30c9-401b-8e56-817777e91f38" containerName="extract"
Dec 04 17:44:39 crc kubenswrapper[4631]: I1204 17:44:39.310781 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="81559ff3-95e9-455f-9d90-46c5f1a981ce" containerName="console"
Dec 04 17:44:39 crc kubenswrapper[4631]: I1204 17:44:39.311941 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6jntd"
Dec 04 17:44:39 crc kubenswrapper[4631]: I1204 17:44:39.327239 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6jntd"]
Dec 04 17:44:39 crc kubenswrapper[4631]: I1204 17:44:39.488579 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnzmn\" (UniqueName: \"kubernetes.io/projected/53cfd817-edc8-46bb-b306-beceb1afb7a2-kube-api-access-dnzmn\") pod \"community-operators-6jntd\" (UID: \"53cfd817-edc8-46bb-b306-beceb1afb7a2\") " pod="openshift-marketplace/community-operators-6jntd"
Dec 04 17:44:39 crc kubenswrapper[4631]: I1204 17:44:39.488672 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53cfd817-edc8-46bb-b306-beceb1afb7a2-utilities\") pod \"community-operators-6jntd\" (UID: \"53cfd817-edc8-46bb-b306-beceb1afb7a2\") " pod="openshift-marketplace/community-operators-6jntd"
Dec 04 17:44:39 crc kubenswrapper[4631]: I1204 17:44:39.488830 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53cfd817-edc8-46bb-b306-beceb1afb7a2-catalog-content\") pod \"community-operators-6jntd\" (UID: \"53cfd817-edc8-46bb-b306-beceb1afb7a2\") " pod="openshift-marketplace/community-operators-6jntd"
Dec 04 17:44:39 crc kubenswrapper[4631]: I1204 17:44:39.589770 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53cfd817-edc8-46bb-b306-beceb1afb7a2-utilities\") pod \"community-operators-6jntd\" (UID: \"53cfd817-edc8-46bb-b306-beceb1afb7a2\") " pod="openshift-marketplace/community-operators-6jntd"
Dec 04 17:44:39 crc kubenswrapper[4631]: I1204 17:44:39.589970 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53cfd817-edc8-46bb-b306-beceb1afb7a2-catalog-content\") pod \"community-operators-6jntd\" (UID: \"53cfd817-edc8-46bb-b306-beceb1afb7a2\") " pod="openshift-marketplace/community-operators-6jntd"
Dec 04 17:44:39 crc kubenswrapper[4631]: I1204 17:44:39.590044 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnzmn\" (UniqueName: \"kubernetes.io/projected/53cfd817-edc8-46bb-b306-beceb1afb7a2-kube-api-access-dnzmn\") pod \"community-operators-6jntd\" (UID: \"53cfd817-edc8-46bb-b306-beceb1afb7a2\") " pod="openshift-marketplace/community-operators-6jntd"
Dec 04 17:44:39 crc kubenswrapper[4631]: I1204 17:44:39.591284 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53cfd817-edc8-46bb-b306-beceb1afb7a2-utilities\") pod \"community-operators-6jntd\" (UID: \"53cfd817-edc8-46bb-b306-beceb1afb7a2\") " pod="openshift-marketplace/community-operators-6jntd"
Dec 04 17:44:39 crc kubenswrapper[4631]: I1204 17:44:39.591375 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53cfd817-edc8-46bb-b306-beceb1afb7a2-catalog-content\") pod \"community-operators-6jntd\" (UID: \"53cfd817-edc8-46bb-b306-beceb1afb7a2\") " pod="openshift-marketplace/community-operators-6jntd"
Dec 04 17:44:39 crc kubenswrapper[4631]: I1204 17:44:39.615755 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnzmn\" (UniqueName: \"kubernetes.io/projected/53cfd817-edc8-46bb-b306-beceb1afb7a2-kube-api-access-dnzmn\") pod \"community-operators-6jntd\" (UID: \"53cfd817-edc8-46bb-b306-beceb1afb7a2\") " pod="openshift-marketplace/community-operators-6jntd"
Dec 04 17:44:39 crc kubenswrapper[4631]: I1204 17:44:39.628798 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6jntd"
Dec 04 17:44:39 crc kubenswrapper[4631]: I1204 17:44:39.889486 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6jntd"]
Dec 04 17:44:40 crc kubenswrapper[4631]: I1204 17:44:40.591398 4631 generic.go:334] "Generic (PLEG): container finished" podID="53cfd817-edc8-46bb-b306-beceb1afb7a2" containerID="e2a435cc17b8c1ced0a29a8a1f99b1514188a02a023f9f7fedaf97a795492422" exitCode=0
Dec 04 17:44:40 crc kubenswrapper[4631]: I1204 17:44:40.591495 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jntd" event={"ID":"53cfd817-edc8-46bb-b306-beceb1afb7a2","Type":"ContainerDied","Data":"e2a435cc17b8c1ced0a29a8a1f99b1514188a02a023f9f7fedaf97a795492422"}
Dec 04 17:44:40 crc kubenswrapper[4631]: I1204 17:44:40.591966 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jntd" event={"ID":"53cfd817-edc8-46bb-b306-beceb1afb7a2","Type":"ContainerStarted","Data":"43da41b7512c044e6d32dd65c721edd5343ea39352e985a6997868d6e2f541f9"}
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.314304 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-9d6f9bbbc-h6txj"]
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.315667 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-9d6f9bbbc-h6txj"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.323060 4631 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.323091 4631 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.323174 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.323071 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.323298 4631 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-z65hz"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.347825 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-9d6f9bbbc-h6txj"]
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.414982 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmn4d\" (UniqueName: \"kubernetes.io/projected/7628937e-69d4-416b-bf62-0b8cb083c4b1-kube-api-access-lmn4d\") pod \"metallb-operator-controller-manager-9d6f9bbbc-h6txj\" (UID: \"7628937e-69d4-416b-bf62-0b8cb083c4b1\") " pod="metallb-system/metallb-operator-controller-manager-9d6f9bbbc-h6txj"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.415035 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7628937e-69d4-416b-bf62-0b8cb083c4b1-apiservice-cert\") pod \"metallb-operator-controller-manager-9d6f9bbbc-h6txj\" (UID: \"7628937e-69d4-416b-bf62-0b8cb083c4b1\") " pod="metallb-system/metallb-operator-controller-manager-9d6f9bbbc-h6txj"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.415152 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7628937e-69d4-416b-bf62-0b8cb083c4b1-webhook-cert\") pod \"metallb-operator-controller-manager-9d6f9bbbc-h6txj\" (UID: \"7628937e-69d4-416b-bf62-0b8cb083c4b1\") " pod="metallb-system/metallb-operator-controller-manager-9d6f9bbbc-h6txj"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.516466 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmn4d\" (UniqueName: \"kubernetes.io/projected/7628937e-69d4-416b-bf62-0b8cb083c4b1-kube-api-access-lmn4d\") pod \"metallb-operator-controller-manager-9d6f9bbbc-h6txj\" (UID: \"7628937e-69d4-416b-bf62-0b8cb083c4b1\") " pod="metallb-system/metallb-operator-controller-manager-9d6f9bbbc-h6txj"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.516543 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7628937e-69d4-416b-bf62-0b8cb083c4b1-apiservice-cert\") pod \"metallb-operator-controller-manager-9d6f9bbbc-h6txj\" (UID: \"7628937e-69d4-416b-bf62-0b8cb083c4b1\") " pod="metallb-system/metallb-operator-controller-manager-9d6f9bbbc-h6txj"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.518214 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7628937e-69d4-416b-bf62-0b8cb083c4b1-webhook-cert\") pod \"metallb-operator-controller-manager-9d6f9bbbc-h6txj\" (UID: \"7628937e-69d4-416b-bf62-0b8cb083c4b1\") " pod="metallb-system/metallb-operator-controller-manager-9d6f9bbbc-h6txj"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.528713 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7628937e-69d4-416b-bf62-0b8cb083c4b1-webhook-cert\") pod \"metallb-operator-controller-manager-9d6f9bbbc-h6txj\" (UID: \"7628937e-69d4-416b-bf62-0b8cb083c4b1\") " pod="metallb-system/metallb-operator-controller-manager-9d6f9bbbc-h6txj"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.540006 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7628937e-69d4-416b-bf62-0b8cb083c4b1-apiservice-cert\") pod \"metallb-operator-controller-manager-9d6f9bbbc-h6txj\" (UID: \"7628937e-69d4-416b-bf62-0b8cb083c4b1\") " pod="metallb-system/metallb-operator-controller-manager-9d6f9bbbc-h6txj"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.540980 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmn4d\" (UniqueName: \"kubernetes.io/projected/7628937e-69d4-416b-bf62-0b8cb083c4b1-kube-api-access-lmn4d\") pod \"metallb-operator-controller-manager-9d6f9bbbc-h6txj\" (UID: \"7628937e-69d4-416b-bf62-0b8cb083c4b1\") " pod="metallb-system/metallb-operator-controller-manager-9d6f9bbbc-h6txj"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.556500 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6dfc44c866-psc9v"]
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.557149 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6dfc44c866-psc9v"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.560336 4631 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-8rqkg"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.562667 4631 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.562809 4631 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.599347 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jntd" event={"ID":"53cfd817-edc8-46bb-b306-beceb1afb7a2","Type":"ContainerStarted","Data":"96528cece5fa2179f392ffbf0c453d0a053320862a7586f1343ac10d282fdf55"}
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.630619 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-9d6f9bbbc-h6txj"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.635517 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6dfc44c866-psc9v"]
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.721054 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b471a575-aeaa-473d-a180-161a7c07d2af-apiservice-cert\") pod \"metallb-operator-webhook-server-6dfc44c866-psc9v\" (UID: \"b471a575-aeaa-473d-a180-161a7c07d2af\") " pod="metallb-system/metallb-operator-webhook-server-6dfc44c866-psc9v"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.721107 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b471a575-aeaa-473d-a180-161a7c07d2af-webhook-cert\") pod \"metallb-operator-webhook-server-6dfc44c866-psc9v\" (UID: \"b471a575-aeaa-473d-a180-161a7c07d2af\") " pod="metallb-system/metallb-operator-webhook-server-6dfc44c866-psc9v"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.721129 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtnpk\" (UniqueName: \"kubernetes.io/projected/b471a575-aeaa-473d-a180-161a7c07d2af-kube-api-access-dtnpk\") pod \"metallb-operator-webhook-server-6dfc44c866-psc9v\" (UID: \"b471a575-aeaa-473d-a180-161a7c07d2af\") " pod="metallb-system/metallb-operator-webhook-server-6dfc44c866-psc9v"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.824517 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b471a575-aeaa-473d-a180-161a7c07d2af-apiservice-cert\") pod \"metallb-operator-webhook-server-6dfc44c866-psc9v\" (UID: \"b471a575-aeaa-473d-a180-161a7c07d2af\") " pod="metallb-system/metallb-operator-webhook-server-6dfc44c866-psc9v"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.824567 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b471a575-aeaa-473d-a180-161a7c07d2af-webhook-cert\") pod \"metallb-operator-webhook-server-6dfc44c866-psc9v\" (UID: \"b471a575-aeaa-473d-a180-161a7c07d2af\") " pod="metallb-system/metallb-operator-webhook-server-6dfc44c866-psc9v"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.824589 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtnpk\" (UniqueName: \"kubernetes.io/projected/b471a575-aeaa-473d-a180-161a7c07d2af-kube-api-access-dtnpk\") pod \"metallb-operator-webhook-server-6dfc44c866-psc9v\" (UID: \"b471a575-aeaa-473d-a180-161a7c07d2af\") " pod="metallb-system/metallb-operator-webhook-server-6dfc44c866-psc9v"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.833667 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b471a575-aeaa-473d-a180-161a7c07d2af-apiservice-cert\") pod \"metallb-operator-webhook-server-6dfc44c866-psc9v\" (UID: \"b471a575-aeaa-473d-a180-161a7c07d2af\") " pod="metallb-system/metallb-operator-webhook-server-6dfc44c866-psc9v"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.841350 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b471a575-aeaa-473d-a180-161a7c07d2af-webhook-cert\") pod \"metallb-operator-webhook-server-6dfc44c866-psc9v\" (UID: \"b471a575-aeaa-473d-a180-161a7c07d2af\") " pod="metallb-system/metallb-operator-webhook-server-6dfc44c866-psc9v"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.850764 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtnpk\" (UniqueName: \"kubernetes.io/projected/b471a575-aeaa-473d-a180-161a7c07d2af-kube-api-access-dtnpk\") pod \"metallb-operator-webhook-server-6dfc44c866-psc9v\" (UID: \"b471a575-aeaa-473d-a180-161a7c07d2af\") " pod="metallb-system/metallb-operator-webhook-server-6dfc44c866-psc9v"
Dec 04 17:44:41 crc kubenswrapper[4631]: I1204 17:44:41.875757 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6dfc44c866-psc9v"
Dec 04 17:44:42 crc kubenswrapper[4631]: I1204 17:44:42.023565 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-9d6f9bbbc-h6txj"]
Dec 04 17:44:42 crc kubenswrapper[4631]: I1204 17:44:42.272434 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6dfc44c866-psc9v"]
Dec 04 17:44:42 crc kubenswrapper[4631]: W1204 17:44:42.276829 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb471a575_aeaa_473d_a180_161a7c07d2af.slice/crio-e19f1440282f1e7541cebd6e977a3398359dcfa5c89144459606b7fb086bfe08 WatchSource:0}: Error finding container e19f1440282f1e7541cebd6e977a3398359dcfa5c89144459606b7fb086bfe08: Status 404 returned error can't find the container with id e19f1440282f1e7541cebd6e977a3398359dcfa5c89144459606b7fb086bfe08
Dec 04 17:44:42 crc kubenswrapper[4631]: I1204 17:44:42.605676 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-9d6f9bbbc-h6txj" event={"ID":"7628937e-69d4-416b-bf62-0b8cb083c4b1","Type":"ContainerStarted","Data":"1aa941d5bf5f4a1b36b553b5814bb6d1cdc6efff49d19de6e625c98cc6ebd08a"}
Dec 04 17:44:42 crc kubenswrapper[4631]: I1204 17:44:42.607429 4631 generic.go:334] "Generic (PLEG): container finished" podID="53cfd817-edc8-46bb-b306-beceb1afb7a2" containerID="96528cece5fa2179f392ffbf0c453d0a053320862a7586f1343ac10d282fdf55" exitCode=0
Dec 04 17:44:42 crc kubenswrapper[4631]: I1204 17:44:42.607507 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jntd" event={"ID":"53cfd817-edc8-46bb-b306-beceb1afb7a2","Type":"ContainerDied","Data":"96528cece5fa2179f392ffbf0c453d0a053320862a7586f1343ac10d282fdf55"}
Dec 04 17:44:42 crc kubenswrapper[4631]: I1204 17:44:42.608321 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6dfc44c866-psc9v" event={"ID":"b471a575-aeaa-473d-a180-161a7c07d2af","Type":"ContainerStarted","Data":"e19f1440282f1e7541cebd6e977a3398359dcfa5c89144459606b7fb086bfe08"}
Dec 04 17:44:43 crc kubenswrapper[4631]: I1204 17:44:43.618760 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jntd" event={"ID":"53cfd817-edc8-46bb-b306-beceb1afb7a2","Type":"ContainerStarted","Data":"3973f99c712abee160429ba84b0980303e69ff21837b48389140ba85504a9fd7"}
Dec 04 17:44:49 crc kubenswrapper[4631]: I1204 17:44:49.629421 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6jntd"
Dec 04 17:44:49 crc kubenswrapper[4631]: I1204 17:44:49.630341 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6jntd"
Dec 04 17:44:49 crc kubenswrapper[4631]: I1204 17:44:49.664090 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6dfc44c866-psc9v" event={"ID":"b471a575-aeaa-473d-a180-161a7c07d2af","Type":"ContainerStarted","Data":"0d4281a6b7618bfe3e2a500f68c10e789862abf0ae97e926acfbe9fd85cea927"}
Dec 04 17:44:49 crc kubenswrapper[4631]: I1204 17:44:49.664240 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-6dfc44c866-psc9v"
Dec 04 17:44:49 crc kubenswrapper[4631]: I1204 17:44:49.666332 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-9d6f9bbbc-h6txj" event={"ID":"7628937e-69d4-416b-bf62-0b8cb083c4b1","Type":"ContainerStarted","Data":"4885eef5284c7e630c6266f82728b84b1e135cbb1322176108ff5f50cd55e7ea"}
Dec 04 17:44:49 crc kubenswrapper[4631]: I1204 17:44:49.666708 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-9d6f9bbbc-h6txj"
Dec 04 17:44:49 crc kubenswrapper[4631]: I1204 17:44:49.682967 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6jntd" podStartSLOduration=8.255409288 podStartE2EDuration="10.682948404s" podCreationTimestamp="2025-12-04 17:44:39 +0000 UTC" firstStartedPulling="2025-12-04 17:44:40.593438582 +0000 UTC m=+1010.625680580" lastFinishedPulling="2025-12-04 17:44:43.020977698 +0000 UTC m=+1013.053219696" observedRunningTime="2025-12-04 17:44:43.644846448 +0000 UTC m=+1013.677088456" watchObservedRunningTime="2025-12-04 17:44:49.682948404 +0000 UTC m=+1019.715190402"
Dec 04 17:44:49 crc kubenswrapper[4631]: I1204 17:44:49.687271 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-6dfc44c866-psc9v" podStartSLOduration=1.98787313 podStartE2EDuration="8.687247125s" podCreationTimestamp="2025-12-04 17:44:41 +0000 UTC" firstStartedPulling="2025-12-04 17:44:42.28068194 +0000 UTC m=+1012.312923938" lastFinishedPulling="2025-12-04 17:44:48.980055945 +0000 UTC m=+1019.012297933" observedRunningTime="2025-12-04 17:44:49.682317396 +0000 UTC m=+1019.714559394" watchObservedRunningTime="2025-12-04 17:44:49.687247125 +0000 UTC m=+1019.719489133"
Dec 04 17:44:49 crc kubenswrapper[4631]: I1204 17:44:49.691874 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6jntd"
Dec 04 17:44:49 crc kubenswrapper[4631]: I1204 17:44:49.739873 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-9d6f9bbbc-h6txj" podStartSLOduration=1.820964462 podStartE2EDuration="8.739852195s" podCreationTimestamp="2025-12-04 17:44:41 +0000 UTC" firstStartedPulling="2025-12-04 17:44:42.047732812 +0000 UTC m=+1012.079974810" lastFinishedPulling="2025-12-04 17:44:48.966620545 +0000 UTC m=+1018.998862543" observedRunningTime="2025-12-04 17:44:49.71494917 +0000 UTC m=+1019.747191168" watchObservedRunningTime="2025-12-04 17:44:49.739852195 +0000 UTC m=+1019.772094193"
Dec 04 17:44:49 crc kubenswrapper[4631]: I1204 17:44:49.764112 4631 kubelet.go:2542]
"SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6jntd" Dec 04 17:44:49 crc kubenswrapper[4631]: I1204 17:44:49.955496 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6jntd"] Dec 04 17:44:51 crc kubenswrapper[4631]: I1204 17:44:51.676913 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6jntd" podUID="53cfd817-edc8-46bb-b306-beceb1afb7a2" containerName="registry-server" containerID="cri-o://3973f99c712abee160429ba84b0980303e69ff21837b48389140ba85504a9fd7" gracePeriod=2 Dec 04 17:44:52 crc kubenswrapper[4631]: I1204 17:44:52.709000 4631 generic.go:334] "Generic (PLEG): container finished" podID="53cfd817-edc8-46bb-b306-beceb1afb7a2" containerID="3973f99c712abee160429ba84b0980303e69ff21837b48389140ba85504a9fd7" exitCode=0 Dec 04 17:44:52 crc kubenswrapper[4631]: I1204 17:44:52.709507 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jntd" event={"ID":"53cfd817-edc8-46bb-b306-beceb1afb7a2","Type":"ContainerDied","Data":"3973f99c712abee160429ba84b0980303e69ff21837b48389140ba85504a9fd7"} Dec 04 17:44:52 crc kubenswrapper[4631]: I1204 17:44:52.859832 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6jntd" Dec 04 17:44:53 crc kubenswrapper[4631]: I1204 17:44:53.008301 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53cfd817-edc8-46bb-b306-beceb1afb7a2-catalog-content\") pod \"53cfd817-edc8-46bb-b306-beceb1afb7a2\" (UID: \"53cfd817-edc8-46bb-b306-beceb1afb7a2\") " Dec 04 17:44:53 crc kubenswrapper[4631]: I1204 17:44:53.008356 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dnzmn\" (UniqueName: \"kubernetes.io/projected/53cfd817-edc8-46bb-b306-beceb1afb7a2-kube-api-access-dnzmn\") pod \"53cfd817-edc8-46bb-b306-beceb1afb7a2\" (UID: \"53cfd817-edc8-46bb-b306-beceb1afb7a2\") " Dec 04 17:44:53 crc kubenswrapper[4631]: I1204 17:44:53.008397 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53cfd817-edc8-46bb-b306-beceb1afb7a2-utilities\") pod \"53cfd817-edc8-46bb-b306-beceb1afb7a2\" (UID: \"53cfd817-edc8-46bb-b306-beceb1afb7a2\") " Dec 04 17:44:53 crc kubenswrapper[4631]: I1204 17:44:53.110833 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53cfd817-edc8-46bb-b306-beceb1afb7a2-utilities" (OuterVolumeSpecName: "utilities") pod "53cfd817-edc8-46bb-b306-beceb1afb7a2" (UID: "53cfd817-edc8-46bb-b306-beceb1afb7a2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:44:53 crc kubenswrapper[4631]: I1204 17:44:53.126111 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53cfd817-edc8-46bb-b306-beceb1afb7a2-kube-api-access-dnzmn" (OuterVolumeSpecName: "kube-api-access-dnzmn") pod "53cfd817-edc8-46bb-b306-beceb1afb7a2" (UID: "53cfd817-edc8-46bb-b306-beceb1afb7a2"). InnerVolumeSpecName "kube-api-access-dnzmn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:44:53 crc kubenswrapper[4631]: I1204 17:44:53.169660 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53cfd817-edc8-46bb-b306-beceb1afb7a2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "53cfd817-edc8-46bb-b306-beceb1afb7a2" (UID: "53cfd817-edc8-46bb-b306-beceb1afb7a2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:44:53 crc kubenswrapper[4631]: I1204 17:44:53.211521 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53cfd817-edc8-46bb-b306-beceb1afb7a2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 17:44:53 crc kubenswrapper[4631]: I1204 17:44:53.211558 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dnzmn\" (UniqueName: \"kubernetes.io/projected/53cfd817-edc8-46bb-b306-beceb1afb7a2-kube-api-access-dnzmn\") on node \"crc\" DevicePath \"\"" Dec 04 17:44:53 crc kubenswrapper[4631]: I1204 17:44:53.211588 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53cfd817-edc8-46bb-b306-beceb1afb7a2-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 17:44:53 crc kubenswrapper[4631]: I1204 17:44:53.718385 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jntd" event={"ID":"53cfd817-edc8-46bb-b306-beceb1afb7a2","Type":"ContainerDied","Data":"43da41b7512c044e6d32dd65c721edd5343ea39352e985a6997868d6e2f541f9"} Dec 04 17:44:53 crc kubenswrapper[4631]: I1204 17:44:53.718852 4631 scope.go:117] "RemoveContainer" containerID="3973f99c712abee160429ba84b0980303e69ff21837b48389140ba85504a9fd7" Dec 04 17:44:53 crc kubenswrapper[4631]: I1204 17:44:53.718642 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6jntd" Dec 04 17:44:53 crc kubenswrapper[4631]: I1204 17:44:53.739424 4631 scope.go:117] "RemoveContainer" containerID="96528cece5fa2179f392ffbf0c453d0a053320862a7586f1343ac10d282fdf55" Dec 04 17:44:53 crc kubenswrapper[4631]: I1204 17:44:53.761977 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6jntd"] Dec 04 17:44:53 crc kubenswrapper[4631]: I1204 17:44:53.762387 4631 scope.go:117] "RemoveContainer" containerID="e2a435cc17b8c1ced0a29a8a1f99b1514188a02a023f9f7fedaf97a795492422" Dec 04 17:44:53 crc kubenswrapper[4631]: I1204 17:44:53.779509 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6jntd"] Dec 04 17:44:54 crc kubenswrapper[4631]: I1204 17:44:54.249714 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53cfd817-edc8-46bb-b306-beceb1afb7a2" path="/var/lib/kubelet/pods/53cfd817-edc8-46bb-b306-beceb1afb7a2/volumes" Dec 04 17:45:00 crc kubenswrapper[4631]: I1204 17:45:00.150916 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414505-mm4zf"] Dec 04 17:45:00 crc kubenswrapper[4631]: E1204 17:45:00.153206 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53cfd817-edc8-46bb-b306-beceb1afb7a2" containerName="extract-content" Dec 04 17:45:00 crc kubenswrapper[4631]: I1204 17:45:00.153303 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="53cfd817-edc8-46bb-b306-beceb1afb7a2" containerName="extract-content" Dec 04 17:45:00 crc kubenswrapper[4631]: E1204 17:45:00.153415 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53cfd817-edc8-46bb-b306-beceb1afb7a2" containerName="extract-utilities" Dec 04 17:45:00 crc kubenswrapper[4631]: I1204 17:45:00.153506 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="53cfd817-edc8-46bb-b306-beceb1afb7a2" containerName="extract-utilities" Dec 04 17:45:00 crc kubenswrapper[4631]: E1204 17:45:00.153579 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53cfd817-edc8-46bb-b306-beceb1afb7a2" containerName="registry-server" Dec 04 17:45:00 crc kubenswrapper[4631]: I1204 17:45:00.153642 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="53cfd817-edc8-46bb-b306-beceb1afb7a2" containerName="registry-server" Dec 04 17:45:00 crc kubenswrapper[4631]: I1204 17:45:00.153804 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="53cfd817-edc8-46bb-b306-beceb1afb7a2" containerName="registry-server" Dec 04 17:45:00 crc kubenswrapper[4631]: I1204 17:45:00.154361 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414505-mm4zf" Dec 04 17:45:00 crc kubenswrapper[4631]: I1204 17:45:00.157191 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 04 17:45:00 crc kubenswrapper[4631]: I1204 17:45:00.157187 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 04 17:45:00 crc kubenswrapper[4631]: I1204 17:45:00.168359 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414505-mm4zf"] Dec 04 17:45:00 crc kubenswrapper[4631]: I1204 17:45:00.301433 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2428162f-1bea-4dc0-8a79-37c806367a59-config-volume\") pod \"collect-profiles-29414505-mm4zf\" (UID: \"2428162f-1bea-4dc0-8a79-37c806367a59\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414505-mm4zf" Dec 04 17:45:00 crc kubenswrapper[4631]: I1204 17:45:00.301848 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fj82s\" (UniqueName: \"kubernetes.io/projected/2428162f-1bea-4dc0-8a79-37c806367a59-kube-api-access-fj82s\") pod \"collect-profiles-29414505-mm4zf\" (UID: \"2428162f-1bea-4dc0-8a79-37c806367a59\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414505-mm4zf" Dec 04 17:45:00 crc kubenswrapper[4631]: I1204 17:45:00.301919 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2428162f-1bea-4dc0-8a79-37c806367a59-secret-volume\") pod \"collect-profiles-29414505-mm4zf\" (UID: \"2428162f-1bea-4dc0-8a79-37c806367a59\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414505-mm4zf" Dec 04 17:45:00 crc kubenswrapper[4631]: I1204 17:45:00.403255 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fj82s\" (UniqueName: \"kubernetes.io/projected/2428162f-1bea-4dc0-8a79-37c806367a59-kube-api-access-fj82s\") pod \"collect-profiles-29414505-mm4zf\" (UID: \"2428162f-1bea-4dc0-8a79-37c806367a59\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414505-mm4zf" Dec 04 17:45:00 crc kubenswrapper[4631]: I1204 17:45:00.403315 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2428162f-1bea-4dc0-8a79-37c806367a59-secret-volume\") pod \"collect-profiles-29414505-mm4zf\" (UID: \"2428162f-1bea-4dc0-8a79-37c806367a59\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414505-mm4zf" Dec 04 17:45:00 crc kubenswrapper[4631]: I1204 17:45:00.403361 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2428162f-1bea-4dc0-8a79-37c806367a59-config-volume\") pod \"collect-profiles-29414505-mm4zf\" (UID: \"2428162f-1bea-4dc0-8a79-37c806367a59\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414505-mm4zf" Dec 04 17:45:00 crc kubenswrapper[4631]: I1204 17:45:00.404627 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2428162f-1bea-4dc0-8a79-37c806367a59-config-volume\") pod 
\"collect-profiles-29414505-mm4zf\" (UID: \"2428162f-1bea-4dc0-8a79-37c806367a59\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414505-mm4zf" Dec 04 17:45:00 crc kubenswrapper[4631]: I1204 17:45:00.412755 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2428162f-1bea-4dc0-8a79-37c806367a59-secret-volume\") pod \"collect-profiles-29414505-mm4zf\" (UID: \"2428162f-1bea-4dc0-8a79-37c806367a59\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414505-mm4zf" Dec 04 17:45:00 crc kubenswrapper[4631]: I1204 17:45:00.427083 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fj82s\" (UniqueName: \"kubernetes.io/projected/2428162f-1bea-4dc0-8a79-37c806367a59-kube-api-access-fj82s\") pod \"collect-profiles-29414505-mm4zf\" (UID: \"2428162f-1bea-4dc0-8a79-37c806367a59\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414505-mm4zf" Dec 04 17:45:00 crc kubenswrapper[4631]: I1204 17:45:00.477610 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414505-mm4zf" Dec 04 17:45:00 crc kubenswrapper[4631]: I1204 17:45:00.925271 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414505-mm4zf"] Dec 04 17:45:00 crc kubenswrapper[4631]: W1204 17:45:00.945796 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2428162f_1bea_4dc0_8a79_37c806367a59.slice/crio-d1829be2cedc080be891c098c3b098a17f2a1029299cd4143d56619f97b3b19c WatchSource:0}: Error finding container d1829be2cedc080be891c098c3b098a17f2a1029299cd4143d56619f97b3b19c: Status 404 returned error can't find the container with id d1829be2cedc080be891c098c3b098a17f2a1029299cd4143d56619f97b3b19c Dec 04 17:45:01 crc kubenswrapper[4631]: I1204 17:45:01.764432 4631 generic.go:334] "Generic (PLEG): container finished" podID="2428162f-1bea-4dc0-8a79-37c806367a59" containerID="8edff1bd06f718da48a64d6b525bdd0775daed1fcb764afaceb6a37c03d0f9e2" exitCode=0 Dec 04 17:45:01 crc kubenswrapper[4631]: I1204 17:45:01.764477 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414505-mm4zf" event={"ID":"2428162f-1bea-4dc0-8a79-37c806367a59","Type":"ContainerDied","Data":"8edff1bd06f718da48a64d6b525bdd0775daed1fcb764afaceb6a37c03d0f9e2"} Dec 04 17:45:01 crc kubenswrapper[4631]: I1204 17:45:01.764503 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414505-mm4zf" event={"ID":"2428162f-1bea-4dc0-8a79-37c806367a59","Type":"ContainerStarted","Data":"d1829be2cedc080be891c098c3b098a17f2a1029299cd4143d56619f97b3b19c"} Dec 04 17:45:01 crc kubenswrapper[4631]: I1204 17:45:01.884873 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6dfc44c866-psc9v" Dec 04 17:45:03 crc kubenswrapper[4631]: I1204 17:45:03.215456 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414505-mm4zf" Dec 04 17:45:03 crc kubenswrapper[4631]: I1204 17:45:03.366510 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2428162f-1bea-4dc0-8a79-37c806367a59-secret-volume\") pod \"2428162f-1bea-4dc0-8a79-37c806367a59\" (UID: \"2428162f-1bea-4dc0-8a79-37c806367a59\") " Dec 04 17:45:03 crc kubenswrapper[4631]: I1204 17:45:03.366581 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2428162f-1bea-4dc0-8a79-37c806367a59-config-volume\") pod \"2428162f-1bea-4dc0-8a79-37c806367a59\" (UID: \"2428162f-1bea-4dc0-8a79-37c806367a59\") " Dec 04 17:45:03 crc kubenswrapper[4631]: I1204 17:45:03.366632 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fj82s\" (UniqueName: \"kubernetes.io/projected/2428162f-1bea-4dc0-8a79-37c806367a59-kube-api-access-fj82s\") pod \"2428162f-1bea-4dc0-8a79-37c806367a59\" (UID: \"2428162f-1bea-4dc0-8a79-37c806367a59\") " Dec 04 17:45:03 crc kubenswrapper[4631]: I1204 17:45:03.367854 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2428162f-1bea-4dc0-8a79-37c806367a59-config-volume" (OuterVolumeSpecName: "config-volume") pod "2428162f-1bea-4dc0-8a79-37c806367a59" (UID: "2428162f-1bea-4dc0-8a79-37c806367a59"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:45:03 crc kubenswrapper[4631]: I1204 17:45:03.375609 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2428162f-1bea-4dc0-8a79-37c806367a59-kube-api-access-fj82s" (OuterVolumeSpecName: "kube-api-access-fj82s") pod "2428162f-1bea-4dc0-8a79-37c806367a59" (UID: "2428162f-1bea-4dc0-8a79-37c806367a59"). InnerVolumeSpecName "kube-api-access-fj82s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:45:03 crc kubenswrapper[4631]: I1204 17:45:03.379598 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2428162f-1bea-4dc0-8a79-37c806367a59-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2428162f-1bea-4dc0-8a79-37c806367a59" (UID: "2428162f-1bea-4dc0-8a79-37c806367a59"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:45:03 crc kubenswrapper[4631]: I1204 17:45:03.467356 4631 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2428162f-1bea-4dc0-8a79-37c806367a59-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 04 17:45:03 crc kubenswrapper[4631]: I1204 17:45:03.467418 4631 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2428162f-1bea-4dc0-8a79-37c806367a59-config-volume\") on node \"crc\" DevicePath \"\"" Dec 04 17:45:03 crc kubenswrapper[4631]: I1204 17:45:03.467431 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fj82s\" (UniqueName: \"kubernetes.io/projected/2428162f-1bea-4dc0-8a79-37c806367a59-kube-api-access-fj82s\") on node \"crc\" DevicePath \"\"" Dec 04 17:45:03 crc kubenswrapper[4631]: I1204 17:45:03.778019 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414505-mm4zf" event={"ID":"2428162f-1bea-4dc0-8a79-37c806367a59","Type":"ContainerDied","Data":"d1829be2cedc080be891c098c3b098a17f2a1029299cd4143d56619f97b3b19c"} Dec 04 17:45:03 crc kubenswrapper[4631]: I1204 17:45:03.778483 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d1829be2cedc080be891c098c3b098a17f2a1029299cd4143d56619f97b3b19c" Dec 04 17:45:03 crc kubenswrapper[4631]: I1204 17:45:03.778074 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414505-mm4zf" Dec 04 17:45:21 crc kubenswrapper[4631]: I1204 17:45:21.634518 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-9d6f9bbbc-h6txj" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.655667 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-d7bk6"] Dec 04 17:45:22 crc kubenswrapper[4631]: E1204 17:45:22.656417 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2428162f-1bea-4dc0-8a79-37c806367a59" containerName="collect-profiles" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.656434 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="2428162f-1bea-4dc0-8a79-37c806367a59" containerName="collect-profiles" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.656551 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="2428162f-1bea-4dc0-8a79-37c806367a59" containerName="collect-profiles" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.658445 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.665768 4631 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.665912 4631 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-8gwc2" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.665879 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-rpshr"] Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.665865 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.667038 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rpshr" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.675436 4631 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.684007 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-rpshr"] Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.757332 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjk7d\" (UniqueName: \"kubernetes.io/projected/ac0998d1-a266-4aeb-9af6-a18659dea142-kube-api-access-cjk7d\") pod \"frr-k8s-d7bk6\" (UID: \"ac0998d1-a266-4aeb-9af6-a18659dea142\") " pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.757387 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/ac0998d1-a266-4aeb-9af6-a18659dea142-metrics\") pod \"frr-k8s-d7bk6\" (UID: \"ac0998d1-a266-4aeb-9af6-a18659dea142\") " pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.757424 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/ac0998d1-a266-4aeb-9af6-a18659dea142-frr-sockets\") pod \"frr-k8s-d7bk6\" (UID: \"ac0998d1-a266-4aeb-9af6-a18659dea142\") " pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.757464 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vmgz\" (UniqueName: \"kubernetes.io/projected/78c76818-4dc3-4a33-b105-f8194a1cde60-kube-api-access-9vmgz\") pod \"frr-k8s-webhook-server-7fcb986d4-rpshr\" (UID: \"78c76818-4dc3-4a33-b105-f8194a1cde60\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rpshr" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.757491 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/78c76818-4dc3-4a33-b105-f8194a1cde60-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-rpshr\" (UID: \"78c76818-4dc3-4a33-b105-f8194a1cde60\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rpshr" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.757513 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: 
\"kubernetes.io/configmap/ac0998d1-a266-4aeb-9af6-a18659dea142-frr-startup\") pod \"frr-k8s-d7bk6\" (UID: \"ac0998d1-a266-4aeb-9af6-a18659dea142\") " pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.757543 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/ac0998d1-a266-4aeb-9af6-a18659dea142-reloader\") pod \"frr-k8s-d7bk6\" (UID: \"ac0998d1-a266-4aeb-9af6-a18659dea142\") " pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.757572 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/ac0998d1-a266-4aeb-9af6-a18659dea142-frr-conf\") pod \"frr-k8s-d7bk6\" (UID: \"ac0998d1-a266-4aeb-9af6-a18659dea142\") " pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.757597 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ac0998d1-a266-4aeb-9af6-a18659dea142-metrics-certs\") pod \"frr-k8s-d7bk6\" (UID: \"ac0998d1-a266-4aeb-9af6-a18659dea142\") " pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.773233 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-45ph8"] Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.774143 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-45ph8" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.776579 4631 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.776734 4631 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.776847 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.776969 4631 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-wb2wn" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.798007 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-cpxkk"] Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.799138 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-cpxkk" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.800775 4631 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.819682 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-cpxkk"] Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.858731 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/ac0998d1-a266-4aeb-9af6-a18659dea142-frr-sockets\") pod \"frr-k8s-d7bk6\" (UID: \"ac0998d1-a266-4aeb-9af6-a18659dea142\") " pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.858779 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vmgz\" (UniqueName: \"kubernetes.io/projected/78c76818-4dc3-4a33-b105-f8194a1cde60-kube-api-access-9vmgz\") pod \"frr-k8s-webhook-server-7fcb986d4-rpshr\" (UID: \"78c76818-4dc3-4a33-b105-f8194a1cde60\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rpshr" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.858831 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/78c76818-4dc3-4a33-b105-f8194a1cde60-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-rpshr\" (UID: \"78c76818-4dc3-4a33-b105-f8194a1cde60\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rpshr" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.858866 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/ac0998d1-a266-4aeb-9af6-a18659dea142-frr-startup\") pod \"frr-k8s-d7bk6\" (UID: \"ac0998d1-a266-4aeb-9af6-a18659dea142\") " pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.858888 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/ac0998d1-a266-4aeb-9af6-a18659dea142-reloader\") pod \"frr-k8s-d7bk6\" (UID: \"ac0998d1-a266-4aeb-9af6-a18659dea142\") " pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.858917 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/ac0998d1-a266-4aeb-9af6-a18659dea142-frr-conf\") pod \"frr-k8s-d7bk6\" (UID: \"ac0998d1-a266-4aeb-9af6-a18659dea142\") " pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.858945 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ac0998d1-a266-4aeb-9af6-a18659dea142-metrics-certs\") pod \"frr-k8s-d7bk6\" (UID: \"ac0998d1-a266-4aeb-9af6-a18659dea142\") " pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.858985 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjk7d\" (UniqueName: \"kubernetes.io/projected/ac0998d1-a266-4aeb-9af6-a18659dea142-kube-api-access-cjk7d\") pod \"frr-k8s-d7bk6\" (UID: \"ac0998d1-a266-4aeb-9af6-a18659dea142\") " pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.859002 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" 
(UniqueName: \"kubernetes.io/empty-dir/ac0998d1-a266-4aeb-9af6-a18659dea142-metrics\") pod \"frr-k8s-d7bk6\" (UID: \"ac0998d1-a266-4aeb-9af6-a18659dea142\") " pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:22 crc kubenswrapper[4631]: E1204 17:45:22.859288 4631 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Dec 04 17:45:22 crc kubenswrapper[4631]: E1204 17:45:22.859391 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/78c76818-4dc3-4a33-b105-f8194a1cde60-cert podName:78c76818-4dc3-4a33-b105-f8194a1cde60 nodeName:}" failed. No retries permitted until 2025-12-04 17:45:23.359364323 +0000 UTC m=+1053.391606321 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/78c76818-4dc3-4a33-b105-f8194a1cde60-cert") pod "frr-k8s-webhook-server-7fcb986d4-rpshr" (UID: "78c76818-4dc3-4a33-b105-f8194a1cde60") : secret "frr-k8s-webhook-server-cert" not found Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.859419 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/ac0998d1-a266-4aeb-9af6-a18659dea142-metrics\") pod \"frr-k8s-d7bk6\" (UID: \"ac0998d1-a266-4aeb-9af6-a18659dea142\") " pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:22 crc kubenswrapper[4631]: E1204 17:45:22.859417 4631 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.859483 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/ac0998d1-a266-4aeb-9af6-a18659dea142-frr-conf\") pod \"frr-k8s-d7bk6\" (UID: \"ac0998d1-a266-4aeb-9af6-a18659dea142\") " pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:22 crc kubenswrapper[4631]: E1204 17:45:22.859488 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ac0998d1-a266-4aeb-9af6-a18659dea142-metrics-certs podName:ac0998d1-a266-4aeb-9af6-a18659dea142 nodeName:}" failed. No retries permitted until 2025-12-04 17:45:23.359471816 +0000 UTC m=+1053.391713814 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ac0998d1-a266-4aeb-9af6-a18659dea142-metrics-certs") pod "frr-k8s-d7bk6" (UID: "ac0998d1-a266-4aeb-9af6-a18659dea142") : secret "frr-k8s-certs-secret" not found Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.859308 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/ac0998d1-a266-4aeb-9af6-a18659dea142-frr-sockets\") pod \"frr-k8s-d7bk6\" (UID: \"ac0998d1-a266-4aeb-9af6-a18659dea142\") " pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.859678 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/ac0998d1-a266-4aeb-9af6-a18659dea142-reloader\") pod \"frr-k8s-d7bk6\" (UID: \"ac0998d1-a266-4aeb-9af6-a18659dea142\") " pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.860202 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/ac0998d1-a266-4aeb-9af6-a18659dea142-frr-startup\") pod \"frr-k8s-d7bk6\" (UID: \"ac0998d1-a266-4aeb-9af6-a18659dea142\") " pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.881323 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjk7d\" (UniqueName: \"kubernetes.io/projected/ac0998d1-a266-4aeb-9af6-a18659dea142-kube-api-access-cjk7d\") pod \"frr-k8s-d7bk6\" (UID: \"ac0998d1-a266-4aeb-9af6-a18659dea142\") " pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.881632 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vmgz\" (UniqueName: \"kubernetes.io/projected/78c76818-4dc3-4a33-b105-f8194a1cde60-kube-api-access-9vmgz\") pod \"frr-k8s-webhook-server-7fcb986d4-rpshr\" (UID: \"78c76818-4dc3-4a33-b105-f8194a1cde60\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rpshr" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.960636 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/025cf6e6-5d36-4973-bac3-7cd1046ddeea-metrics-certs\") pod \"speaker-45ph8\" (UID: \"025cf6e6-5d36-4973-bac3-7cd1046ddeea\") " pod="metallb-system/speaker-45ph8" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.960698 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7531a7c8-09d0-470e-b530-227bff4a6659-cert\") pod \"controller-f8648f98b-cpxkk\" (UID: \"7531a7c8-09d0-470e-b530-227bff4a6659\") " pod="metallb-system/controller-f8648f98b-cpxkk" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.960820 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88gsc\" (UniqueName: \"kubernetes.io/projected/025cf6e6-5d36-4973-bac3-7cd1046ddeea-kube-api-access-88gsc\") pod \"speaker-45ph8\" (UID: \"025cf6e6-5d36-4973-bac3-7cd1046ddeea\") " pod="metallb-system/speaker-45ph8" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.960843 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/025cf6e6-5d36-4973-bac3-7cd1046ddeea-memberlist\") pod \"speaker-45ph8\" (UID: 
\"025cf6e6-5d36-4973-bac3-7cd1046ddeea\") " pod="metallb-system/speaker-45ph8" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.960860 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/025cf6e6-5d36-4973-bac3-7cd1046ddeea-metallb-excludel2\") pod \"speaker-45ph8\" (UID: \"025cf6e6-5d36-4973-bac3-7cd1046ddeea\") " pod="metallb-system/speaker-45ph8" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.960879 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7531a7c8-09d0-470e-b530-227bff4a6659-metrics-certs\") pod \"controller-f8648f98b-cpxkk\" (UID: \"7531a7c8-09d0-470e-b530-227bff4a6659\") " pod="metallb-system/controller-f8648f98b-cpxkk" Dec 04 17:45:22 crc kubenswrapper[4631]: I1204 17:45:22.961027 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25btg\" (UniqueName: \"kubernetes.io/projected/7531a7c8-09d0-470e-b530-227bff4a6659-kube-api-access-25btg\") pod \"controller-f8648f98b-cpxkk\" (UID: \"7531a7c8-09d0-470e-b530-227bff4a6659\") " pod="metallb-system/controller-f8648f98b-cpxkk" Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.062764 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7531a7c8-09d0-470e-b530-227bff4a6659-cert\") pod \"controller-f8648f98b-cpxkk\" (UID: \"7531a7c8-09d0-470e-b530-227bff4a6659\") " pod="metallb-system/controller-f8648f98b-cpxkk" Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.062852 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88gsc\" (UniqueName: \"kubernetes.io/projected/025cf6e6-5d36-4973-bac3-7cd1046ddeea-kube-api-access-88gsc\") pod \"speaker-45ph8\" (UID: \"025cf6e6-5d36-4973-bac3-7cd1046ddeea\") " pod="metallb-system/speaker-45ph8" Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.062873 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/025cf6e6-5d36-4973-bac3-7cd1046ddeea-metallb-excludel2\") pod \"speaker-45ph8\" (UID: \"025cf6e6-5d36-4973-bac3-7cd1046ddeea\") " pod="metallb-system/speaker-45ph8" Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.062892 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/025cf6e6-5d36-4973-bac3-7cd1046ddeea-memberlist\") pod \"speaker-45ph8\" (UID: \"025cf6e6-5d36-4973-bac3-7cd1046ddeea\") " pod="metallb-system/speaker-45ph8" Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.062912 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7531a7c8-09d0-470e-b530-227bff4a6659-metrics-certs\") pod \"controller-f8648f98b-cpxkk\" (UID: \"7531a7c8-09d0-470e-b530-227bff4a6659\") " pod="metallb-system/controller-f8648f98b-cpxkk" Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.062949 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25btg\" (UniqueName: \"kubernetes.io/projected/7531a7c8-09d0-470e-b530-227bff4a6659-kube-api-access-25btg\") pod \"controller-f8648f98b-cpxkk\" (UID: \"7531a7c8-09d0-470e-b530-227bff4a6659\") " pod="metallb-system/controller-f8648f98b-cpxkk" 
Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.062981 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/025cf6e6-5d36-4973-bac3-7cd1046ddeea-metrics-certs\") pod \"speaker-45ph8\" (UID: \"025cf6e6-5d36-4973-bac3-7cd1046ddeea\") " pod="metallb-system/speaker-45ph8" Dec 04 17:45:23 crc kubenswrapper[4631]: E1204 17:45:23.063292 4631 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 04 17:45:23 crc kubenswrapper[4631]: E1204 17:45:23.063348 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/025cf6e6-5d36-4973-bac3-7cd1046ddeea-memberlist podName:025cf6e6-5d36-4973-bac3-7cd1046ddeea nodeName:}" failed. No retries permitted until 2025-12-04 17:45:23.563332995 +0000 UTC m=+1053.595574993 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/025cf6e6-5d36-4973-bac3-7cd1046ddeea-memberlist") pod "speaker-45ph8" (UID: "025cf6e6-5d36-4973-bac3-7cd1046ddeea") : secret "metallb-memberlist" not found Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.063963 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/025cf6e6-5d36-4973-bac3-7cd1046ddeea-metallb-excludel2\") pod \"speaker-45ph8\" (UID: \"025cf6e6-5d36-4973-bac3-7cd1046ddeea\") " pod="metallb-system/speaker-45ph8" Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.065800 4631 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.069135 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7531a7c8-09d0-470e-b530-227bff4a6659-metrics-certs\") pod \"controller-f8648f98b-cpxkk\" (UID: \"7531a7c8-09d0-470e-b530-227bff4a6659\") " pod="metallb-system/controller-f8648f98b-cpxkk" Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.078018 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7531a7c8-09d0-470e-b530-227bff4a6659-cert\") pod \"controller-f8648f98b-cpxkk\" (UID: \"7531a7c8-09d0-470e-b530-227bff4a6659\") " pod="metallb-system/controller-f8648f98b-cpxkk" Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.078432 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/025cf6e6-5d36-4973-bac3-7cd1046ddeea-metrics-certs\") pod \"speaker-45ph8\" (UID: \"025cf6e6-5d36-4973-bac3-7cd1046ddeea\") " pod="metallb-system/speaker-45ph8" Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.087229 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25btg\" (UniqueName: \"kubernetes.io/projected/7531a7c8-09d0-470e-b530-227bff4a6659-kube-api-access-25btg\") pod \"controller-f8648f98b-cpxkk\" (UID: \"7531a7c8-09d0-470e-b530-227bff4a6659\") " pod="metallb-system/controller-f8648f98b-cpxkk" Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.098159 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88gsc\" (UniqueName: \"kubernetes.io/projected/025cf6e6-5d36-4973-bac3-7cd1046ddeea-kube-api-access-88gsc\") pod \"speaker-45ph8\" (UID: \"025cf6e6-5d36-4973-bac3-7cd1046ddeea\") " pod="metallb-system/speaker-45ph8" Dec 04 
17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.116512 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-cpxkk" Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.351076 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-cpxkk"] Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.367967 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/78c76818-4dc3-4a33-b105-f8194a1cde60-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-rpshr\" (UID: \"78c76818-4dc3-4a33-b105-f8194a1cde60\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rpshr" Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.368059 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ac0998d1-a266-4aeb-9af6-a18659dea142-metrics-certs\") pod \"frr-k8s-d7bk6\" (UID: \"ac0998d1-a266-4aeb-9af6-a18659dea142\") " pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.374289 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ac0998d1-a266-4aeb-9af6-a18659dea142-metrics-certs\") pod \"frr-k8s-d7bk6\" (UID: \"ac0998d1-a266-4aeb-9af6-a18659dea142\") " pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.374350 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/78c76818-4dc3-4a33-b105-f8194a1cde60-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-rpshr\" (UID: \"78c76818-4dc3-4a33-b105-f8194a1cde60\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rpshr" Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.571262 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/025cf6e6-5d36-4973-bac3-7cd1046ddeea-memberlist\") pod \"speaker-45ph8\" (UID: \"025cf6e6-5d36-4973-bac3-7cd1046ddeea\") " pod="metallb-system/speaker-45ph8" Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.575349 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/025cf6e6-5d36-4973-bac3-7cd1046ddeea-memberlist\") pod \"speaker-45ph8\" (UID: \"025cf6e6-5d36-4973-bac3-7cd1046ddeea\") " pod="metallb-system/speaker-45ph8" Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.586838 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.595149 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rpshr" Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.688880 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-45ph8" Dec 04 17:45:23 crc kubenswrapper[4631]: W1204 17:45:23.712819 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod025cf6e6_5d36_4973_bac3_7cd1046ddeea.slice/crio-39e12908edbc753e29e71449f26f03c07a48244e8c4357aa82242774fe36d9d6 WatchSource:0}: Error finding container 39e12908edbc753e29e71449f26f03c07a48244e8c4357aa82242774fe36d9d6: Status 404 returned error can't find the container with id 39e12908edbc753e29e71449f26f03c07a48244e8c4357aa82242774fe36d9d6 Dec 04 17:45:23 crc kubenswrapper[4631]: I1204 17:45:23.845213 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-rpshr"] Dec 04 17:45:24 crc kubenswrapper[4631]: I1204 17:45:24.065105 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rpshr" event={"ID":"78c76818-4dc3-4a33-b105-f8194a1cde60","Type":"ContainerStarted","Data":"76a9ab9222a15bd26556d1f812e593946e99e6ee756270a5a615aba96e235f87"} Dec 04 17:45:24 crc kubenswrapper[4631]: I1204 17:45:24.068090 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-cpxkk" event={"ID":"7531a7c8-09d0-470e-b530-227bff4a6659","Type":"ContainerStarted","Data":"98163b7cac158d7394e3b4243ebb3bae6a26df7bf6a1177d8052b0fb06a57dd1"} Dec 04 17:45:24 crc kubenswrapper[4631]: I1204 17:45:24.068156 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-cpxkk" event={"ID":"7531a7c8-09d0-470e-b530-227bff4a6659","Type":"ContainerStarted","Data":"47026ce7cd701a44e33dd1ab7dee33b1db103c69f3db3d18959afdffc286b326"} Dec 04 17:45:24 crc kubenswrapper[4631]: I1204 17:45:24.068171 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-cpxkk" event={"ID":"7531a7c8-09d0-470e-b530-227bff4a6659","Type":"ContainerStarted","Data":"904b65f2975c8b75db72030ad76b3806838932c87a6c6984f74f6f86f800a9a5"} Dec 04 17:45:24 crc kubenswrapper[4631]: I1204 17:45:24.069436 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-cpxkk" Dec 04 17:45:24 crc kubenswrapper[4631]: I1204 17:45:24.070511 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-45ph8" event={"ID":"025cf6e6-5d36-4973-bac3-7cd1046ddeea","Type":"ContainerStarted","Data":"39e12908edbc753e29e71449f26f03c07a48244e8c4357aa82242774fe36d9d6"} Dec 04 17:45:24 crc kubenswrapper[4631]: I1204 17:45:24.075300 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-d7bk6" event={"ID":"ac0998d1-a266-4aeb-9af6-a18659dea142","Type":"ContainerStarted","Data":"a6d73a58b3d01f82382ac5797a9a9201eb511c1f920a3fcdc60b2345232b0f97"} Dec 04 17:45:25 crc kubenswrapper[4631]: I1204 17:45:25.084396 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-45ph8" event={"ID":"025cf6e6-5d36-4973-bac3-7cd1046ddeea","Type":"ContainerStarted","Data":"0af5b3103434ab05a1203b48a17b3ac60801515d254ce7707f18f98b2d043e2b"} Dec 04 17:45:25 crc kubenswrapper[4631]: I1204 17:45:25.084858 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-45ph8" Dec 04 17:45:25 crc kubenswrapper[4631]: I1204 17:45:25.084870 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-45ph8" 
event={"ID":"025cf6e6-5d36-4973-bac3-7cd1046ddeea","Type":"ContainerStarted","Data":"a5836a6af225327e7570602c3a12e775340990870168d36c13110c84714ba569"} Dec 04 17:45:25 crc kubenswrapper[4631]: I1204 17:45:25.102497 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-45ph8" podStartSLOduration=3.102476915 podStartE2EDuration="3.102476915s" podCreationTimestamp="2025-12-04 17:45:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:45:25.097143482 +0000 UTC m=+1055.129385500" watchObservedRunningTime="2025-12-04 17:45:25.102476915 +0000 UTC m=+1055.134718923" Dec 04 17:45:25 crc kubenswrapper[4631]: I1204 17:45:25.103860 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-cpxkk" podStartSLOduration=3.103852935 podStartE2EDuration="3.103852935s" podCreationTimestamp="2025-12-04 17:45:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:45:24.093904706 +0000 UTC m=+1054.126146714" watchObservedRunningTime="2025-12-04 17:45:25.103852935 +0000 UTC m=+1055.136094933" Dec 04 17:45:26 crc kubenswrapper[4631]: I1204 17:45:26.304931 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wmzct"] Dec 04 17:45:26 crc kubenswrapper[4631]: I1204 17:45:26.306050 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wmzct" Dec 04 17:45:26 crc kubenswrapper[4631]: I1204 17:45:26.331194 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wmzct"] Dec 04 17:45:26 crc kubenswrapper[4631]: I1204 17:45:26.415656 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7wng\" (UniqueName: \"kubernetes.io/projected/f656e14c-d494-445c-bcf0-25e456d84ad0-kube-api-access-q7wng\") pod \"certified-operators-wmzct\" (UID: \"f656e14c-d494-445c-bcf0-25e456d84ad0\") " pod="openshift-marketplace/certified-operators-wmzct" Dec 04 17:45:26 crc kubenswrapper[4631]: I1204 17:45:26.415761 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f656e14c-d494-445c-bcf0-25e456d84ad0-catalog-content\") pod \"certified-operators-wmzct\" (UID: \"f656e14c-d494-445c-bcf0-25e456d84ad0\") " pod="openshift-marketplace/certified-operators-wmzct" Dec 04 17:45:26 crc kubenswrapper[4631]: I1204 17:45:26.415797 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f656e14c-d494-445c-bcf0-25e456d84ad0-utilities\") pod \"certified-operators-wmzct\" (UID: \"f656e14c-d494-445c-bcf0-25e456d84ad0\") " pod="openshift-marketplace/certified-operators-wmzct" Dec 04 17:45:26 crc kubenswrapper[4631]: I1204 17:45:26.517268 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f656e14c-d494-445c-bcf0-25e456d84ad0-utilities\") pod \"certified-operators-wmzct\" (UID: \"f656e14c-d494-445c-bcf0-25e456d84ad0\") " pod="openshift-marketplace/certified-operators-wmzct" Dec 04 17:45:26 crc kubenswrapper[4631]: I1204 17:45:26.517769 4631 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f656e14c-d494-445c-bcf0-25e456d84ad0-utilities\") pod \"certified-operators-wmzct\" (UID: \"f656e14c-d494-445c-bcf0-25e456d84ad0\") " pod="openshift-marketplace/certified-operators-wmzct" Dec 04 17:45:26 crc kubenswrapper[4631]: I1204 17:45:26.517906 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7wng\" (UniqueName: \"kubernetes.io/projected/f656e14c-d494-445c-bcf0-25e456d84ad0-kube-api-access-q7wng\") pod \"certified-operators-wmzct\" (UID: \"f656e14c-d494-445c-bcf0-25e456d84ad0\") " pod="openshift-marketplace/certified-operators-wmzct" Dec 04 17:45:26 crc kubenswrapper[4631]: I1204 17:45:26.518233 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f656e14c-d494-445c-bcf0-25e456d84ad0-catalog-content\") pod \"certified-operators-wmzct\" (UID: \"f656e14c-d494-445c-bcf0-25e456d84ad0\") " pod="openshift-marketplace/certified-operators-wmzct" Dec 04 17:45:26 crc kubenswrapper[4631]: I1204 17:45:26.518520 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f656e14c-d494-445c-bcf0-25e456d84ad0-catalog-content\") pod \"certified-operators-wmzct\" (UID: \"f656e14c-d494-445c-bcf0-25e456d84ad0\") " pod="openshift-marketplace/certified-operators-wmzct" Dec 04 17:45:26 crc kubenswrapper[4631]: I1204 17:45:26.543994 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7wng\" (UniqueName: \"kubernetes.io/projected/f656e14c-d494-445c-bcf0-25e456d84ad0-kube-api-access-q7wng\") pod \"certified-operators-wmzct\" (UID: \"f656e14c-d494-445c-bcf0-25e456d84ad0\") " pod="openshift-marketplace/certified-operators-wmzct" Dec 04 17:45:26 crc kubenswrapper[4631]: I1204 17:45:26.624154 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wmzct" Dec 04 17:45:27 crc kubenswrapper[4631]: I1204 17:45:27.262587 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wmzct"] Dec 04 17:45:27 crc kubenswrapper[4631]: W1204 17:45:27.292427 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf656e14c_d494_445c_bcf0_25e456d84ad0.slice/crio-bec7b5ac53d366aa3eac6c0102f885bf44adc6da2b9cd75376cb0b481b4e17af WatchSource:0}: Error finding container bec7b5ac53d366aa3eac6c0102f885bf44adc6da2b9cd75376cb0b481b4e17af: Status 404 returned error can't find the container with id bec7b5ac53d366aa3eac6c0102f885bf44adc6da2b9cd75376cb0b481b4e17af Dec 04 17:45:28 crc kubenswrapper[4631]: I1204 17:45:28.108809 4631 generic.go:334] "Generic (PLEG): container finished" podID="f656e14c-d494-445c-bcf0-25e456d84ad0" containerID="b2f1198c5bb2ea6af8eae1f6f3166d4f45ec26527b5c8dee0bf709948ad19439" exitCode=0 Dec 04 17:45:28 crc kubenswrapper[4631]: I1204 17:45:28.109280 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wmzct" event={"ID":"f656e14c-d494-445c-bcf0-25e456d84ad0","Type":"ContainerDied","Data":"b2f1198c5bb2ea6af8eae1f6f3166d4f45ec26527b5c8dee0bf709948ad19439"} Dec 04 17:45:28 crc kubenswrapper[4631]: I1204 17:45:28.109313 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wmzct" event={"ID":"f656e14c-d494-445c-bcf0-25e456d84ad0","Type":"ContainerStarted","Data":"bec7b5ac53d366aa3eac6c0102f885bf44adc6da2b9cd75376cb0b481b4e17af"} Dec 04 17:45:29 crc kubenswrapper[4631]: I1204 17:45:29.116968 4631 generic.go:334] "Generic (PLEG): container finished" podID="f656e14c-d494-445c-bcf0-25e456d84ad0" containerID="a57c16bddd880ee81821a4596733c4d236a85c82583c25ab5c5320aeedd94ce3" exitCode=0 Dec 04 17:45:29 crc kubenswrapper[4631]: I1204 17:45:29.117065 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wmzct" event={"ID":"f656e14c-d494-445c-bcf0-25e456d84ad0","Type":"ContainerDied","Data":"a57c16bddd880ee81821a4596733c4d236a85c82583c25ab5c5320aeedd94ce3"} Dec 04 17:45:33 crc kubenswrapper[4631]: I1204 17:45:33.122837 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-cpxkk" Dec 04 17:45:33 crc kubenswrapper[4631]: I1204 17:45:33.150954 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wmzct" event={"ID":"f656e14c-d494-445c-bcf0-25e456d84ad0","Type":"ContainerStarted","Data":"cbae908c7ec4445bdfa79f0bb97a295754571b766fb6a4b8a9daeec6e7df9b35"} Dec 04 17:45:33 crc kubenswrapper[4631]: I1204 17:45:33.152276 4631 generic.go:334] "Generic (PLEG): container finished" podID="ac0998d1-a266-4aeb-9af6-a18659dea142" containerID="c99e5f69644f7cd5848c947dc067ab6adcdca7151a93b67089a3980d9ce5030c" exitCode=0 Dec 04 17:45:33 crc kubenswrapper[4631]: I1204 17:45:33.152339 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-d7bk6" event={"ID":"ac0998d1-a266-4aeb-9af6-a18659dea142","Type":"ContainerDied","Data":"c99e5f69644f7cd5848c947dc067ab6adcdca7151a93b67089a3980d9ce5030c"} Dec 04 17:45:33 crc kubenswrapper[4631]: I1204 17:45:33.160640 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rpshr" 
event={"ID":"78c76818-4dc3-4a33-b105-f8194a1cde60","Type":"ContainerStarted","Data":"7e47f0d1cb3583446baa39fc8ee938c57691b5012996483db0bc897d226f462d"} Dec 04 17:45:33 crc kubenswrapper[4631]: I1204 17:45:33.161289 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rpshr" Dec 04 17:45:33 crc kubenswrapper[4631]: I1204 17:45:33.179222 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wmzct" podStartSLOduration=3.006112889 podStartE2EDuration="7.17920276s" podCreationTimestamp="2025-12-04 17:45:26 +0000 UTC" firstStartedPulling="2025-12-04 17:45:28.112926872 +0000 UTC m=+1058.145168880" lastFinishedPulling="2025-12-04 17:45:32.286016743 +0000 UTC m=+1062.318258751" observedRunningTime="2025-12-04 17:45:33.177254674 +0000 UTC m=+1063.209496682" watchObservedRunningTime="2025-12-04 17:45:33.17920276 +0000 UTC m=+1063.211444758" Dec 04 17:45:33 crc kubenswrapper[4631]: I1204 17:45:33.192512 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rpshr" podStartSLOduration=2.712412705 podStartE2EDuration="11.192493073s" podCreationTimestamp="2025-12-04 17:45:22 +0000 UTC" firstStartedPulling="2025-12-04 17:45:23.930394919 +0000 UTC m=+1053.962636907" lastFinishedPulling="2025-12-04 17:45:32.410475277 +0000 UTC m=+1062.442717275" observedRunningTime="2025-12-04 17:45:33.189346742 +0000 UTC m=+1063.221588740" watchObservedRunningTime="2025-12-04 17:45:33.192493073 +0000 UTC m=+1063.224735081" Dec 04 17:45:34 crc kubenswrapper[4631]: I1204 17:45:34.167867 4631 generic.go:334] "Generic (PLEG): container finished" podID="ac0998d1-a266-4aeb-9af6-a18659dea142" containerID="08c914afbcd50603af1ec0c5c217c52837f0a519bbb07e0e8a39bb50d09cd0be" exitCode=0 Dec 04 17:45:34 crc kubenswrapper[4631]: I1204 17:45:34.167968 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-d7bk6" event={"ID":"ac0998d1-a266-4aeb-9af6-a18659dea142","Type":"ContainerDied","Data":"08c914afbcd50603af1ec0c5c217c52837f0a519bbb07e0e8a39bb50d09cd0be"} Dec 04 17:45:35 crc kubenswrapper[4631]: I1204 17:45:35.178571 4631 generic.go:334] "Generic (PLEG): container finished" podID="ac0998d1-a266-4aeb-9af6-a18659dea142" containerID="a604b04f3060cb1a299c9f24b92e2d69677d83471a6404627d514286e8b3a507" exitCode=0 Dec 04 17:45:35 crc kubenswrapper[4631]: I1204 17:45:35.180218 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-d7bk6" event={"ID":"ac0998d1-a266-4aeb-9af6-a18659dea142","Type":"ContainerDied","Data":"a604b04f3060cb1a299c9f24b92e2d69677d83471a6404627d514286e8b3a507"} Dec 04 17:45:36 crc kubenswrapper[4631]: I1204 17:45:36.189919 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-d7bk6" event={"ID":"ac0998d1-a266-4aeb-9af6-a18659dea142","Type":"ContainerStarted","Data":"7c894f11cdd6a1e2b79176b81940a88521f044d2597e72df2e09da9c6cd34682"} Dec 04 17:45:36 crc kubenswrapper[4631]: I1204 17:45:36.190253 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-d7bk6" event={"ID":"ac0998d1-a266-4aeb-9af6-a18659dea142","Type":"ContainerStarted","Data":"6cc994fd508d3fd60b1dd762c8e125a19b16dd218ef2a26c7f23bcbc7ecfe152"} Dec 04 17:45:36 crc kubenswrapper[4631]: I1204 17:45:36.190264 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-d7bk6" 
event={"ID":"ac0998d1-a266-4aeb-9af6-a18659dea142","Type":"ContainerStarted","Data":"258525fca26d8334dedda1e148ba398c38a4c94f187dde4ffe1219df7b10453d"} Dec 04 17:45:36 crc kubenswrapper[4631]: I1204 17:45:36.190272 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-d7bk6" event={"ID":"ac0998d1-a266-4aeb-9af6-a18659dea142","Type":"ContainerStarted","Data":"3903416f113189b672e1329f4af7b59a1aa5bd834dac8568ebe4ecab5ed77670"} Dec 04 17:45:36 crc kubenswrapper[4631]: I1204 17:45:36.190281 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-d7bk6" event={"ID":"ac0998d1-a266-4aeb-9af6-a18659dea142","Type":"ContainerStarted","Data":"af8a05673bbbca74b11969e9fd77cd33ca0e74967f18533ff5b042c8689340af"} Dec 04 17:45:36 crc kubenswrapper[4631]: I1204 17:45:36.625581 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wmzct" Dec 04 17:45:36 crc kubenswrapper[4631]: I1204 17:45:36.625635 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wmzct" Dec 04 17:45:36 crc kubenswrapper[4631]: I1204 17:45:36.687745 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wmzct" Dec 04 17:45:37 crc kubenswrapper[4631]: I1204 17:45:37.213550 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-d7bk6" event={"ID":"ac0998d1-a266-4aeb-9af6-a18659dea142","Type":"ContainerStarted","Data":"3c6bad5694db5aaaa65ce19c096bfcf87606958c3dae01ba7fe1a06c65cd31d5"} Dec 04 17:45:37 crc kubenswrapper[4631]: I1204 17:45:37.254494 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-d7bk6" podStartSLOduration=6.562002882 podStartE2EDuration="15.254464845s" podCreationTimestamp="2025-12-04 17:45:22 +0000 UTC" firstStartedPulling="2025-12-04 17:45:23.738920636 +0000 UTC m=+1053.771162634" lastFinishedPulling="2025-12-04 17:45:32.431382589 +0000 UTC m=+1062.463624597" observedRunningTime="2025-12-04 17:45:37.242699946 +0000 UTC m=+1067.274941954" watchObservedRunningTime="2025-12-04 17:45:37.254464845 +0000 UTC m=+1067.286706883" Dec 04 17:45:37 crc kubenswrapper[4631]: I1204 17:45:37.290100 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wmzct" Dec 04 17:45:37 crc kubenswrapper[4631]: I1204 17:45:37.345960 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wmzct"] Dec 04 17:45:38 crc kubenswrapper[4631]: I1204 17:45:38.221402 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:38 crc kubenswrapper[4631]: I1204 17:45:38.587529 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:38 crc kubenswrapper[4631]: I1204 17:45:38.642885 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:39 crc kubenswrapper[4631]: I1204 17:45:39.228654 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wmzct" podUID="f656e14c-d494-445c-bcf0-25e456d84ad0" containerName="registry-server" containerID="cri-o://cbae908c7ec4445bdfa79f0bb97a295754571b766fb6a4b8a9daeec6e7df9b35" gracePeriod=2 Dec 04 17:45:40 crc kubenswrapper[4631]: I1204 
17:45:40.778669 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wmzct" Dec 04 17:45:40 crc kubenswrapper[4631]: I1204 17:45:40.880712 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f656e14c-d494-445c-bcf0-25e456d84ad0-utilities\") pod \"f656e14c-d494-445c-bcf0-25e456d84ad0\" (UID: \"f656e14c-d494-445c-bcf0-25e456d84ad0\") " Dec 04 17:45:40 crc kubenswrapper[4631]: I1204 17:45:40.880960 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7wng\" (UniqueName: \"kubernetes.io/projected/f656e14c-d494-445c-bcf0-25e456d84ad0-kube-api-access-q7wng\") pod \"f656e14c-d494-445c-bcf0-25e456d84ad0\" (UID: \"f656e14c-d494-445c-bcf0-25e456d84ad0\") " Dec 04 17:45:40 crc kubenswrapper[4631]: I1204 17:45:40.881036 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f656e14c-d494-445c-bcf0-25e456d84ad0-catalog-content\") pod \"f656e14c-d494-445c-bcf0-25e456d84ad0\" (UID: \"f656e14c-d494-445c-bcf0-25e456d84ad0\") " Dec 04 17:45:40 crc kubenswrapper[4631]: I1204 17:45:40.882137 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f656e14c-d494-445c-bcf0-25e456d84ad0-utilities" (OuterVolumeSpecName: "utilities") pod "f656e14c-d494-445c-bcf0-25e456d84ad0" (UID: "f656e14c-d494-445c-bcf0-25e456d84ad0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:45:40 crc kubenswrapper[4631]: I1204 17:45:40.887649 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f656e14c-d494-445c-bcf0-25e456d84ad0-kube-api-access-q7wng" (OuterVolumeSpecName: "kube-api-access-q7wng") pod "f656e14c-d494-445c-bcf0-25e456d84ad0" (UID: "f656e14c-d494-445c-bcf0-25e456d84ad0"). InnerVolumeSpecName "kube-api-access-q7wng". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:45:40 crc kubenswrapper[4631]: I1204 17:45:40.928883 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f656e14c-d494-445c-bcf0-25e456d84ad0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f656e14c-d494-445c-bcf0-25e456d84ad0" (UID: "f656e14c-d494-445c-bcf0-25e456d84ad0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:45:40 crc kubenswrapper[4631]: I1204 17:45:40.982418 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7wng\" (UniqueName: \"kubernetes.io/projected/f656e14c-d494-445c-bcf0-25e456d84ad0-kube-api-access-q7wng\") on node \"crc\" DevicePath \"\"" Dec 04 17:45:40 crc kubenswrapper[4631]: I1204 17:45:40.982460 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f656e14c-d494-445c-bcf0-25e456d84ad0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 17:45:40 crc kubenswrapper[4631]: I1204 17:45:40.982474 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f656e14c-d494-445c-bcf0-25e456d84ad0-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 17:45:41 crc kubenswrapper[4631]: I1204 17:45:41.252295 4631 generic.go:334] "Generic (PLEG): container finished" podID="f656e14c-d494-445c-bcf0-25e456d84ad0" containerID="cbae908c7ec4445bdfa79f0bb97a295754571b766fb6a4b8a9daeec6e7df9b35" exitCode=0 Dec 04 17:45:41 crc kubenswrapper[4631]: I1204 17:45:41.252406 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wmzct" Dec 04 17:45:41 crc kubenswrapper[4631]: I1204 17:45:41.252394 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wmzct" event={"ID":"f656e14c-d494-445c-bcf0-25e456d84ad0","Type":"ContainerDied","Data":"cbae908c7ec4445bdfa79f0bb97a295754571b766fb6a4b8a9daeec6e7df9b35"} Dec 04 17:45:41 crc kubenswrapper[4631]: I1204 17:45:41.252579 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wmzct" event={"ID":"f656e14c-d494-445c-bcf0-25e456d84ad0","Type":"ContainerDied","Data":"bec7b5ac53d366aa3eac6c0102f885bf44adc6da2b9cd75376cb0b481b4e17af"} Dec 04 17:45:41 crc kubenswrapper[4631]: I1204 17:45:41.252609 4631 scope.go:117] "RemoveContainer" containerID="cbae908c7ec4445bdfa79f0bb97a295754571b766fb6a4b8a9daeec6e7df9b35" Dec 04 17:45:41 crc kubenswrapper[4631]: I1204 17:45:41.275428 4631 scope.go:117] "RemoveContainer" containerID="a57c16bddd880ee81821a4596733c4d236a85c82583c25ab5c5320aeedd94ce3" Dec 04 17:45:41 crc kubenswrapper[4631]: I1204 17:45:41.299025 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wmzct"] Dec 04 17:45:41 crc kubenswrapper[4631]: I1204 17:45:41.306324 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wmzct"] Dec 04 17:45:41 crc kubenswrapper[4631]: I1204 17:45:41.315821 4631 scope.go:117] "RemoveContainer" containerID="b2f1198c5bb2ea6af8eae1f6f3166d4f45ec26527b5c8dee0bf709948ad19439" Dec 04 17:45:41 crc kubenswrapper[4631]: I1204 17:45:41.333108 4631 scope.go:117] "RemoveContainer" containerID="cbae908c7ec4445bdfa79f0bb97a295754571b766fb6a4b8a9daeec6e7df9b35" Dec 04 17:45:41 crc kubenswrapper[4631]: E1204 17:45:41.333564 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbae908c7ec4445bdfa79f0bb97a295754571b766fb6a4b8a9daeec6e7df9b35\": container with ID starting with cbae908c7ec4445bdfa79f0bb97a295754571b766fb6a4b8a9daeec6e7df9b35 not found: ID does not exist" containerID="cbae908c7ec4445bdfa79f0bb97a295754571b766fb6a4b8a9daeec6e7df9b35" Dec 04 17:45:41 crc kubenswrapper[4631]: I1204 17:45:41.333614 
4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbae908c7ec4445bdfa79f0bb97a295754571b766fb6a4b8a9daeec6e7df9b35"} err="failed to get container status \"cbae908c7ec4445bdfa79f0bb97a295754571b766fb6a4b8a9daeec6e7df9b35\": rpc error: code = NotFound desc = could not find container \"cbae908c7ec4445bdfa79f0bb97a295754571b766fb6a4b8a9daeec6e7df9b35\": container with ID starting with cbae908c7ec4445bdfa79f0bb97a295754571b766fb6a4b8a9daeec6e7df9b35 not found: ID does not exist" Dec 04 17:45:41 crc kubenswrapper[4631]: I1204 17:45:41.333645 4631 scope.go:117] "RemoveContainer" containerID="a57c16bddd880ee81821a4596733c4d236a85c82583c25ab5c5320aeedd94ce3" Dec 04 17:45:41 crc kubenswrapper[4631]: E1204 17:45:41.333918 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a57c16bddd880ee81821a4596733c4d236a85c82583c25ab5c5320aeedd94ce3\": container with ID starting with a57c16bddd880ee81821a4596733c4d236a85c82583c25ab5c5320aeedd94ce3 not found: ID does not exist" containerID="a57c16bddd880ee81821a4596733c4d236a85c82583c25ab5c5320aeedd94ce3" Dec 04 17:45:41 crc kubenswrapper[4631]: I1204 17:45:41.333949 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a57c16bddd880ee81821a4596733c4d236a85c82583c25ab5c5320aeedd94ce3"} err="failed to get container status \"a57c16bddd880ee81821a4596733c4d236a85c82583c25ab5c5320aeedd94ce3\": rpc error: code = NotFound desc = could not find container \"a57c16bddd880ee81821a4596733c4d236a85c82583c25ab5c5320aeedd94ce3\": container with ID starting with a57c16bddd880ee81821a4596733c4d236a85c82583c25ab5c5320aeedd94ce3 not found: ID does not exist" Dec 04 17:45:41 crc kubenswrapper[4631]: I1204 17:45:41.333968 4631 scope.go:117] "RemoveContainer" containerID="b2f1198c5bb2ea6af8eae1f6f3166d4f45ec26527b5c8dee0bf709948ad19439" Dec 04 17:45:41 crc kubenswrapper[4631]: E1204 17:45:41.334220 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2f1198c5bb2ea6af8eae1f6f3166d4f45ec26527b5c8dee0bf709948ad19439\": container with ID starting with b2f1198c5bb2ea6af8eae1f6f3166d4f45ec26527b5c8dee0bf709948ad19439 not found: ID does not exist" containerID="b2f1198c5bb2ea6af8eae1f6f3166d4f45ec26527b5c8dee0bf709948ad19439" Dec 04 17:45:41 crc kubenswrapper[4631]: I1204 17:45:41.334246 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2f1198c5bb2ea6af8eae1f6f3166d4f45ec26527b5c8dee0bf709948ad19439"} err="failed to get container status \"b2f1198c5bb2ea6af8eae1f6f3166d4f45ec26527b5c8dee0bf709948ad19439\": rpc error: code = NotFound desc = could not find container \"b2f1198c5bb2ea6af8eae1f6f3166d4f45ec26527b5c8dee0bf709948ad19439\": container with ID starting with b2f1198c5bb2ea6af8eae1f6f3166d4f45ec26527b5c8dee0bf709948ad19439 not found: ID does not exist" Dec 04 17:45:42 crc kubenswrapper[4631]: I1204 17:45:42.254854 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f656e14c-d494-445c-bcf0-25e456d84ad0" path="/var/lib/kubelet/pods/f656e14c-d494-445c-bcf0-25e456d84ad0/volumes" Dec 04 17:45:43 crc kubenswrapper[4631]: I1204 17:45:43.606682 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-rpshr" Dec 04 17:45:43 crc kubenswrapper[4631]: I1204 17:45:43.693507 4631 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="metallb-system/speaker-45ph8" Dec 04 17:45:46 crc kubenswrapper[4631]: I1204 17:45:46.930299 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-d794m"] Dec 04 17:45:46 crc kubenswrapper[4631]: E1204 17:45:46.931136 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f656e14c-d494-445c-bcf0-25e456d84ad0" containerName="extract-utilities" Dec 04 17:45:46 crc kubenswrapper[4631]: I1204 17:45:46.931155 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="f656e14c-d494-445c-bcf0-25e456d84ad0" containerName="extract-utilities" Dec 04 17:45:46 crc kubenswrapper[4631]: E1204 17:45:46.931170 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f656e14c-d494-445c-bcf0-25e456d84ad0" containerName="registry-server" Dec 04 17:45:46 crc kubenswrapper[4631]: I1204 17:45:46.931179 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="f656e14c-d494-445c-bcf0-25e456d84ad0" containerName="registry-server" Dec 04 17:45:46 crc kubenswrapper[4631]: E1204 17:45:46.931210 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f656e14c-d494-445c-bcf0-25e456d84ad0" containerName="extract-content" Dec 04 17:45:46 crc kubenswrapper[4631]: I1204 17:45:46.931220 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="f656e14c-d494-445c-bcf0-25e456d84ad0" containerName="extract-content" Dec 04 17:45:46 crc kubenswrapper[4631]: I1204 17:45:46.931409 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="f656e14c-d494-445c-bcf0-25e456d84ad0" containerName="registry-server" Dec 04 17:45:46 crc kubenswrapper[4631]: I1204 17:45:46.932005 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-d794m" Dec 04 17:45:46 crc kubenswrapper[4631]: I1204 17:45:46.937108 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 04 17:45:46 crc kubenswrapper[4631]: I1204 17:45:46.937884 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-fzcq4" Dec 04 17:45:46 crc kubenswrapper[4631]: I1204 17:45:46.941598 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-d794m"] Dec 04 17:45:46 crc kubenswrapper[4631]: I1204 17:45:46.986912 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 04 17:45:47 crc kubenswrapper[4631]: I1204 17:45:47.081616 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2dm7\" (UniqueName: \"kubernetes.io/projected/b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f-kube-api-access-w2dm7\") pod \"openstack-operator-index-d794m\" (UID: \"b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f\") " pod="openstack-operators/openstack-operator-index-d794m" Dec 04 17:45:47 crc kubenswrapper[4631]: I1204 17:45:47.183334 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2dm7\" (UniqueName: \"kubernetes.io/projected/b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f-kube-api-access-w2dm7\") pod \"openstack-operator-index-d794m\" (UID: \"b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f\") " pod="openstack-operators/openstack-operator-index-d794m" Dec 04 17:45:47 crc kubenswrapper[4631]: I1204 17:45:47.212266 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-w2dm7\" (UniqueName: \"kubernetes.io/projected/b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f-kube-api-access-w2dm7\") pod \"openstack-operator-index-d794m\" (UID: \"b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f\") " pod="openstack-operators/openstack-operator-index-d794m" Dec 04 17:45:47 crc kubenswrapper[4631]: I1204 17:45:47.299485 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-d794m" Dec 04 17:45:47 crc kubenswrapper[4631]: I1204 17:45:47.643142 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-d794m"] Dec 04 17:45:48 crc kubenswrapper[4631]: I1204 17:45:48.313041 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-d794m" event={"ID":"b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f","Type":"ContainerStarted","Data":"ec6f55ac62bfb3ea723f50ab75f1dbc11b5cc0484c10a49eb36117768671c7f0"} Dec 04 17:45:50 crc kubenswrapper[4631]: I1204 17:45:50.276988 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-d794m"] Dec 04 17:45:50 crc kubenswrapper[4631]: I1204 17:45:50.881267 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-c9xfp"] Dec 04 17:45:50 crc kubenswrapper[4631]: I1204 17:45:50.883106 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-c9xfp" Dec 04 17:45:50 crc kubenswrapper[4631]: I1204 17:45:50.888114 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-c9xfp"] Dec 04 17:45:50 crc kubenswrapper[4631]: I1204 17:45:50.930882 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gs9qf\" (UniqueName: \"kubernetes.io/projected/f1c2db29-609d-4d06-bf5a-702536504419-kube-api-access-gs9qf\") pod \"openstack-operator-index-c9xfp\" (UID: \"f1c2db29-609d-4d06-bf5a-702536504419\") " pod="openstack-operators/openstack-operator-index-c9xfp" Dec 04 17:45:51 crc kubenswrapper[4631]: I1204 17:45:51.031870 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gs9qf\" (UniqueName: \"kubernetes.io/projected/f1c2db29-609d-4d06-bf5a-702536504419-kube-api-access-gs9qf\") pod \"openstack-operator-index-c9xfp\" (UID: \"f1c2db29-609d-4d06-bf5a-702536504419\") " pod="openstack-operators/openstack-operator-index-c9xfp" Dec 04 17:45:51 crc kubenswrapper[4631]: I1204 17:45:51.050941 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gs9qf\" (UniqueName: \"kubernetes.io/projected/f1c2db29-609d-4d06-bf5a-702536504419-kube-api-access-gs9qf\") pod \"openstack-operator-index-c9xfp\" (UID: \"f1c2db29-609d-4d06-bf5a-702536504419\") " pod="openstack-operators/openstack-operator-index-c9xfp" Dec 04 17:45:51 crc kubenswrapper[4631]: I1204 17:45:51.198674 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-c9xfp" Dec 04 17:45:51 crc kubenswrapper[4631]: I1204 17:45:51.366953 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-d794m" event={"ID":"b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f","Type":"ContainerStarted","Data":"cc4fdb4eb75743eb6a5826748f0375c761f2e48894923764cbca9d5fabceb1be"} Dec 04 17:45:51 crc kubenswrapper[4631]: I1204 17:45:51.367209 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-d794m" podUID="b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f" containerName="registry-server" containerID="cri-o://cc4fdb4eb75743eb6a5826748f0375c761f2e48894923764cbca9d5fabceb1be" gracePeriod=2 Dec 04 17:45:51 crc kubenswrapper[4631]: I1204 17:45:51.412398 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-d794m" podStartSLOduration=2.442835111 podStartE2EDuration="5.41237816s" podCreationTimestamp="2025-12-04 17:45:46 +0000 UTC" firstStartedPulling="2025-12-04 17:45:47.649427137 +0000 UTC m=+1077.681669135" lastFinishedPulling="2025-12-04 17:45:50.618970166 +0000 UTC m=+1080.651212184" observedRunningTime="2025-12-04 17:45:51.381759428 +0000 UTC m=+1081.414001416" watchObservedRunningTime="2025-12-04 17:45:51.41237816 +0000 UTC m=+1081.444620158" Dec 04 17:45:51 crc kubenswrapper[4631]: I1204 17:45:51.678329 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-c9xfp"] Dec 04 17:45:51 crc kubenswrapper[4631]: W1204 17:45:51.681910 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1c2db29_609d_4d06_bf5a_702536504419.slice/crio-ecd7f1ff39ca58019f281a23e88909c76deefc9b1a6a7f91e60019c7c65ee16d WatchSource:0}: Error finding container ecd7f1ff39ca58019f281a23e88909c76deefc9b1a6a7f91e60019c7c65ee16d: Status 404 returned error can't find the container with id ecd7f1ff39ca58019f281a23e88909c76deefc9b1a6a7f91e60019c7c65ee16d Dec 04 17:45:51 crc kubenswrapper[4631]: I1204 17:45:51.762703 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-d794m" Dec 04 17:45:51 crc kubenswrapper[4631]: I1204 17:45:51.952082 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w2dm7\" (UniqueName: \"kubernetes.io/projected/b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f-kube-api-access-w2dm7\") pod \"b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f\" (UID: \"b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f\") " Dec 04 17:45:51 crc kubenswrapper[4631]: I1204 17:45:51.957603 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f-kube-api-access-w2dm7" (OuterVolumeSpecName: "kube-api-access-w2dm7") pod "b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f" (UID: "b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f"). InnerVolumeSpecName "kube-api-access-w2dm7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:45:52 crc kubenswrapper[4631]: I1204 17:45:52.053507 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w2dm7\" (UniqueName: \"kubernetes.io/projected/b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f-kube-api-access-w2dm7\") on node \"crc\" DevicePath \"\"" Dec 04 17:45:52 crc kubenswrapper[4631]: I1204 17:45:52.376528 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-c9xfp" event={"ID":"f1c2db29-609d-4d06-bf5a-702536504419","Type":"ContainerStarted","Data":"d296528bbce5f0bf90d98127c8984885fd4e31a57b2f0f45d9addb6024f333d6"} Dec 04 17:45:52 crc kubenswrapper[4631]: I1204 17:45:52.376607 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-c9xfp" event={"ID":"f1c2db29-609d-4d06-bf5a-702536504419","Type":"ContainerStarted","Data":"ecd7f1ff39ca58019f281a23e88909c76deefc9b1a6a7f91e60019c7c65ee16d"} Dec 04 17:45:52 crc kubenswrapper[4631]: I1204 17:45:52.378857 4631 generic.go:334] "Generic (PLEG): container finished" podID="b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f" containerID="cc4fdb4eb75743eb6a5826748f0375c761f2e48894923764cbca9d5fabceb1be" exitCode=0 Dec 04 17:45:52 crc kubenswrapper[4631]: I1204 17:45:52.378914 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-d794m" event={"ID":"b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f","Type":"ContainerDied","Data":"cc4fdb4eb75743eb6a5826748f0375c761f2e48894923764cbca9d5fabceb1be"} Dec 04 17:45:52 crc kubenswrapper[4631]: I1204 17:45:52.378946 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-d794m" event={"ID":"b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f","Type":"ContainerDied","Data":"ec6f55ac62bfb3ea723f50ab75f1dbc11b5cc0484c10a49eb36117768671c7f0"} Dec 04 17:45:52 crc kubenswrapper[4631]: I1204 17:45:52.378942 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-d794m" Dec 04 17:45:52 crc kubenswrapper[4631]: I1204 17:45:52.379083 4631 scope.go:117] "RemoveContainer" containerID="cc4fdb4eb75743eb6a5826748f0375c761f2e48894923764cbca9d5fabceb1be" Dec 04 17:45:52 crc kubenswrapper[4631]: I1204 17:45:52.408342 4631 scope.go:117] "RemoveContainer" containerID="cc4fdb4eb75743eb6a5826748f0375c761f2e48894923764cbca9d5fabceb1be" Dec 04 17:45:52 crc kubenswrapper[4631]: E1204 17:45:52.409771 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc4fdb4eb75743eb6a5826748f0375c761f2e48894923764cbca9d5fabceb1be\": container with ID starting with cc4fdb4eb75743eb6a5826748f0375c761f2e48894923764cbca9d5fabceb1be not found: ID does not exist" containerID="cc4fdb4eb75743eb6a5826748f0375c761f2e48894923764cbca9d5fabceb1be" Dec 04 17:45:52 crc kubenswrapper[4631]: I1204 17:45:52.409865 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc4fdb4eb75743eb6a5826748f0375c761f2e48894923764cbca9d5fabceb1be"} err="failed to get container status \"cc4fdb4eb75743eb6a5826748f0375c761f2e48894923764cbca9d5fabceb1be\": rpc error: code = NotFound desc = could not find container \"cc4fdb4eb75743eb6a5826748f0375c761f2e48894923764cbca9d5fabceb1be\": container with ID starting with cc4fdb4eb75743eb6a5826748f0375c761f2e48894923764cbca9d5fabceb1be not found: ID does not exist" Dec 04 17:45:52 crc kubenswrapper[4631]: I1204 17:45:52.416277 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-c9xfp" podStartSLOduration=2.3666092340000002 podStartE2EDuration="2.416243842s" podCreationTimestamp="2025-12-04 17:45:50 +0000 UTC" firstStartedPulling="2025-12-04 17:45:51.686266386 +0000 UTC m=+1081.718508384" lastFinishedPulling="2025-12-04 17:45:51.735900994 +0000 UTC m=+1081.768142992" observedRunningTime="2025-12-04 17:45:52.409507158 +0000 UTC m=+1082.441749166" watchObservedRunningTime="2025-12-04 17:45:52.416243842 +0000 UTC m=+1082.448485880" Dec 04 17:45:52 crc kubenswrapper[4631]: I1204 17:45:52.433551 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-d794m"] Dec 04 17:45:52 crc kubenswrapper[4631]: I1204 17:45:52.438151 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-d794m"] Dec 04 17:45:53 crc kubenswrapper[4631]: I1204 17:45:53.593260 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-d7bk6" Dec 04 17:45:54 crc kubenswrapper[4631]: I1204 17:45:54.248168 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f" path="/var/lib/kubelet/pods/b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f/volumes" Dec 04 17:46:01 crc kubenswrapper[4631]: I1204 17:46:01.199802 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-c9xfp" Dec 04 17:46:01 crc kubenswrapper[4631]: I1204 17:46:01.200352 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-c9xfp" Dec 04 17:46:01 crc kubenswrapper[4631]: I1204 17:46:01.241190 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-c9xfp" Dec 04 17:46:01 crc kubenswrapper[4631]: I1204 
17:46:01.502975 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-c9xfp" Dec 04 17:46:02 crc kubenswrapper[4631]: I1204 17:46:02.736512 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs"] Dec 04 17:46:02 crc kubenswrapper[4631]: E1204 17:46:02.737244 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f" containerName="registry-server" Dec 04 17:46:02 crc kubenswrapper[4631]: I1204 17:46:02.737268 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f" containerName="registry-server" Dec 04 17:46:02 crc kubenswrapper[4631]: I1204 17:46:02.737536 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="b42af5ea-fbd8-4d03-a63d-4f6be37bdc7f" containerName="registry-server" Dec 04 17:46:02 crc kubenswrapper[4631]: I1204 17:46:02.738912 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs" Dec 04 17:46:02 crc kubenswrapper[4631]: I1204 17:46:02.741015 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-kgq97" Dec 04 17:46:02 crc kubenswrapper[4631]: I1204 17:46:02.756606 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs"] Dec 04 17:46:02 crc kubenswrapper[4631]: I1204 17:46:02.837935 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ad8edd24-7550-455a-b394-343d4e2ca11b-util\") pod \"69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs\" (UID: \"ad8edd24-7550-455a-b394-343d4e2ca11b\") " pod="openstack-operators/69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs" Dec 04 17:46:02 crc kubenswrapper[4631]: I1204 17:46:02.838247 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ad8edd24-7550-455a-b394-343d4e2ca11b-bundle\") pod \"69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs\" (UID: \"ad8edd24-7550-455a-b394-343d4e2ca11b\") " pod="openstack-operators/69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs" Dec 04 17:46:02 crc kubenswrapper[4631]: I1204 17:46:02.838343 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4c6z\" (UniqueName: \"kubernetes.io/projected/ad8edd24-7550-455a-b394-343d4e2ca11b-kube-api-access-z4c6z\") pod \"69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs\" (UID: \"ad8edd24-7550-455a-b394-343d4e2ca11b\") " pod="openstack-operators/69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs" Dec 04 17:46:02 crc kubenswrapper[4631]: I1204 17:46:02.939632 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ad8edd24-7550-455a-b394-343d4e2ca11b-util\") pod \"69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs\" (UID: \"ad8edd24-7550-455a-b394-343d4e2ca11b\") " pod="openstack-operators/69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs" Dec 04 17:46:02 crc kubenswrapper[4631]: I1204 17:46:02.939765 4631 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ad8edd24-7550-455a-b394-343d4e2ca11b-bundle\") pod \"69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs\" (UID: \"ad8edd24-7550-455a-b394-343d4e2ca11b\") " pod="openstack-operators/69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs" Dec 04 17:46:02 crc kubenswrapper[4631]: I1204 17:46:02.939805 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4c6z\" (UniqueName: \"kubernetes.io/projected/ad8edd24-7550-455a-b394-343d4e2ca11b-kube-api-access-z4c6z\") pod \"69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs\" (UID: \"ad8edd24-7550-455a-b394-343d4e2ca11b\") " pod="openstack-operators/69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs" Dec 04 17:46:02 crc kubenswrapper[4631]: I1204 17:46:02.940118 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ad8edd24-7550-455a-b394-343d4e2ca11b-util\") pod \"69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs\" (UID: \"ad8edd24-7550-455a-b394-343d4e2ca11b\") " pod="openstack-operators/69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs" Dec 04 17:46:02 crc kubenswrapper[4631]: I1204 17:46:02.940175 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ad8edd24-7550-455a-b394-343d4e2ca11b-bundle\") pod \"69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs\" (UID: \"ad8edd24-7550-455a-b394-343d4e2ca11b\") " pod="openstack-operators/69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs" Dec 04 17:46:02 crc kubenswrapper[4631]: I1204 17:46:02.960860 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4c6z\" (UniqueName: \"kubernetes.io/projected/ad8edd24-7550-455a-b394-343d4e2ca11b-kube-api-access-z4c6z\") pod \"69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs\" (UID: \"ad8edd24-7550-455a-b394-343d4e2ca11b\") " pod="openstack-operators/69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs" Dec 04 17:46:03 crc kubenswrapper[4631]: I1204 17:46:03.066037 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs" Dec 04 17:46:03 crc kubenswrapper[4631]: I1204 17:46:03.470822 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs"] Dec 04 17:46:04 crc kubenswrapper[4631]: I1204 17:46:04.460365 4631 generic.go:334] "Generic (PLEG): container finished" podID="ad8edd24-7550-455a-b394-343d4e2ca11b" containerID="351976cf95a0f586c426ea65718f0c4ed9a1ce48bfa93d7a183fda046d9c31a0" exitCode=0 Dec 04 17:46:04 crc kubenswrapper[4631]: I1204 17:46:04.460482 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs" event={"ID":"ad8edd24-7550-455a-b394-343d4e2ca11b","Type":"ContainerDied","Data":"351976cf95a0f586c426ea65718f0c4ed9a1ce48bfa93d7a183fda046d9c31a0"} Dec 04 17:46:04 crc kubenswrapper[4631]: I1204 17:46:04.460650 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs" event={"ID":"ad8edd24-7550-455a-b394-343d4e2ca11b","Type":"ContainerStarted","Data":"86d4dcb1a168161c65a7950902f18ac0373dfd4e4565f65494a59eac8863b171"} Dec 04 17:46:05 crc kubenswrapper[4631]: I1204 17:46:05.467118 4631 generic.go:334] "Generic (PLEG): container finished" podID="ad8edd24-7550-455a-b394-343d4e2ca11b" containerID="9376fd9ab2b9cf530a3800a0b5d0e0fb5eadd421ab511e3bfeb2113239bb5b84" exitCode=0 Dec 04 17:46:05 crc kubenswrapper[4631]: I1204 17:46:05.467432 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs" event={"ID":"ad8edd24-7550-455a-b394-343d4e2ca11b","Type":"ContainerDied","Data":"9376fd9ab2b9cf530a3800a0b5d0e0fb5eadd421ab511e3bfeb2113239bb5b84"} Dec 04 17:46:06 crc kubenswrapper[4631]: I1204 17:46:06.479288 4631 generic.go:334] "Generic (PLEG): container finished" podID="ad8edd24-7550-455a-b394-343d4e2ca11b" containerID="7f54350864c807d36fb930a61aa0ab095646d1551264707fd1edf15fa8dd0130" exitCode=0 Dec 04 17:46:06 crc kubenswrapper[4631]: I1204 17:46:06.479470 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs" event={"ID":"ad8edd24-7550-455a-b394-343d4e2ca11b","Type":"ContainerDied","Data":"7f54350864c807d36fb930a61aa0ab095646d1551264707fd1edf15fa8dd0130"} Dec 04 17:46:07 crc kubenswrapper[4631]: I1204 17:46:07.787391 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs" Dec 04 17:46:07 crc kubenswrapper[4631]: I1204 17:46:07.817115 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ad8edd24-7550-455a-b394-343d4e2ca11b-bundle\") pod \"ad8edd24-7550-455a-b394-343d4e2ca11b\" (UID: \"ad8edd24-7550-455a-b394-343d4e2ca11b\") " Dec 04 17:46:07 crc kubenswrapper[4631]: I1204 17:46:07.817411 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z4c6z\" (UniqueName: \"kubernetes.io/projected/ad8edd24-7550-455a-b394-343d4e2ca11b-kube-api-access-z4c6z\") pod \"ad8edd24-7550-455a-b394-343d4e2ca11b\" (UID: \"ad8edd24-7550-455a-b394-343d4e2ca11b\") " Dec 04 17:46:07 crc kubenswrapper[4631]: I1204 17:46:07.817581 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ad8edd24-7550-455a-b394-343d4e2ca11b-util\") pod \"ad8edd24-7550-455a-b394-343d4e2ca11b\" (UID: \"ad8edd24-7550-455a-b394-343d4e2ca11b\") " Dec 04 17:46:07 crc kubenswrapper[4631]: I1204 17:46:07.817984 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad8edd24-7550-455a-b394-343d4e2ca11b-bundle" (OuterVolumeSpecName: "bundle") pod "ad8edd24-7550-455a-b394-343d4e2ca11b" (UID: "ad8edd24-7550-455a-b394-343d4e2ca11b"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:46:07 crc kubenswrapper[4631]: I1204 17:46:07.823636 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad8edd24-7550-455a-b394-343d4e2ca11b-kube-api-access-z4c6z" (OuterVolumeSpecName: "kube-api-access-z4c6z") pod "ad8edd24-7550-455a-b394-343d4e2ca11b" (UID: "ad8edd24-7550-455a-b394-343d4e2ca11b"). InnerVolumeSpecName "kube-api-access-z4c6z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:46:07 crc kubenswrapper[4631]: I1204 17:46:07.838006 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad8edd24-7550-455a-b394-343d4e2ca11b-util" (OuterVolumeSpecName: "util") pod "ad8edd24-7550-455a-b394-343d4e2ca11b" (UID: "ad8edd24-7550-455a-b394-343d4e2ca11b"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:46:07 crc kubenswrapper[4631]: I1204 17:46:07.918985 4631 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ad8edd24-7550-455a-b394-343d4e2ca11b-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:46:07 crc kubenswrapper[4631]: I1204 17:46:07.919222 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z4c6z\" (UniqueName: \"kubernetes.io/projected/ad8edd24-7550-455a-b394-343d4e2ca11b-kube-api-access-z4c6z\") on node \"crc\" DevicePath \"\"" Dec 04 17:46:07 crc kubenswrapper[4631]: I1204 17:46:07.919303 4631 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ad8edd24-7550-455a-b394-343d4e2ca11b-util\") on node \"crc\" DevicePath \"\"" Dec 04 17:46:08 crc kubenswrapper[4631]: I1204 17:46:08.493069 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs" event={"ID":"ad8edd24-7550-455a-b394-343d4e2ca11b","Type":"ContainerDied","Data":"86d4dcb1a168161c65a7950902f18ac0373dfd4e4565f65494a59eac8863b171"} Dec 04 17:46:08 crc kubenswrapper[4631]: I1204 17:46:08.493114 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86d4dcb1a168161c65a7950902f18ac0373dfd4e4565f65494a59eac8863b171" Dec 04 17:46:08 crc kubenswrapper[4631]: I1204 17:46:08.493123 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs" Dec 04 17:46:11 crc kubenswrapper[4631]: I1204 17:46:11.606812 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-66bcc8f984-lzslx"] Dec 04 17:46:11 crc kubenswrapper[4631]: E1204 17:46:11.607427 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad8edd24-7550-455a-b394-343d4e2ca11b" containerName="util" Dec 04 17:46:11 crc kubenswrapper[4631]: I1204 17:46:11.607445 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad8edd24-7550-455a-b394-343d4e2ca11b" containerName="util" Dec 04 17:46:11 crc kubenswrapper[4631]: E1204 17:46:11.607461 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad8edd24-7550-455a-b394-343d4e2ca11b" containerName="pull" Dec 04 17:46:11 crc kubenswrapper[4631]: I1204 17:46:11.607469 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad8edd24-7550-455a-b394-343d4e2ca11b" containerName="pull" Dec 04 17:46:11 crc kubenswrapper[4631]: E1204 17:46:11.607483 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad8edd24-7550-455a-b394-343d4e2ca11b" containerName="extract" Dec 04 17:46:11 crc kubenswrapper[4631]: I1204 17:46:11.607495 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad8edd24-7550-455a-b394-343d4e2ca11b" containerName="extract" Dec 04 17:46:11 crc kubenswrapper[4631]: I1204 17:46:11.607646 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad8edd24-7550-455a-b394-343d4e2ca11b" containerName="extract" Dec 04 17:46:11 crc kubenswrapper[4631]: I1204 17:46:11.608164 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-66bcc8f984-lzslx" Dec 04 17:46:11 crc kubenswrapper[4631]: I1204 17:46:11.617545 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-8l4mv" Dec 04 17:46:11 crc kubenswrapper[4631]: I1204 17:46:11.642094 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-66bcc8f984-lzslx"] Dec 04 17:46:11 crc kubenswrapper[4631]: I1204 17:46:11.669721 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tzwz\" (UniqueName: \"kubernetes.io/projected/75ee8627-c453-43a3-a933-080907b850cc-kube-api-access-8tzwz\") pod \"openstack-operator-controller-operator-66bcc8f984-lzslx\" (UID: \"75ee8627-c453-43a3-a933-080907b850cc\") " pod="openstack-operators/openstack-operator-controller-operator-66bcc8f984-lzslx" Dec 04 17:46:11 crc kubenswrapper[4631]: I1204 17:46:11.770947 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tzwz\" (UniqueName: \"kubernetes.io/projected/75ee8627-c453-43a3-a933-080907b850cc-kube-api-access-8tzwz\") pod \"openstack-operator-controller-operator-66bcc8f984-lzslx\" (UID: \"75ee8627-c453-43a3-a933-080907b850cc\") " pod="openstack-operators/openstack-operator-controller-operator-66bcc8f984-lzslx" Dec 04 17:46:11 crc kubenswrapper[4631]: I1204 17:46:11.788360 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tzwz\" (UniqueName: \"kubernetes.io/projected/75ee8627-c453-43a3-a933-080907b850cc-kube-api-access-8tzwz\") pod \"openstack-operator-controller-operator-66bcc8f984-lzslx\" (UID: \"75ee8627-c453-43a3-a933-080907b850cc\") " pod="openstack-operators/openstack-operator-controller-operator-66bcc8f984-lzslx" Dec 04 17:46:11 crc kubenswrapper[4631]: I1204 17:46:11.933277 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-66bcc8f984-lzslx" Dec 04 17:46:12 crc kubenswrapper[4631]: I1204 17:46:12.173679 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-66bcc8f984-lzslx"] Dec 04 17:46:12 crc kubenswrapper[4631]: I1204 17:46:12.520231 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-66bcc8f984-lzslx" event={"ID":"75ee8627-c453-43a3-a933-080907b850cc","Type":"ContainerStarted","Data":"d1a0e6c1586efaf3782bedcac5c146008da37b082113d7bc9ff5ac9793dd7b4d"} Dec 04 17:46:16 crc kubenswrapper[4631]: I1204 17:46:16.545574 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-66bcc8f984-lzslx" event={"ID":"75ee8627-c453-43a3-a933-080907b850cc","Type":"ContainerStarted","Data":"c74a4252d8d5d195c2d7e7be57053ba4947da4a01721e78278ea8043cf8a150d"} Dec 04 17:46:16 crc kubenswrapper[4631]: I1204 17:46:16.546152 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-66bcc8f984-lzslx" Dec 04 17:46:16 crc kubenswrapper[4631]: I1204 17:46:16.575954 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-66bcc8f984-lzslx" podStartSLOduration=1.464138069 podStartE2EDuration="5.575931446s" podCreationTimestamp="2025-12-04 17:46:11 +0000 UTC" firstStartedPulling="2025-12-04 17:46:12.18421353 +0000 UTC m=+1102.216455518" lastFinishedPulling="2025-12-04 17:46:16.296006897 +0000 UTC m=+1106.328248895" observedRunningTime="2025-12-04 17:46:16.571484708 +0000 UTC m=+1106.603726726" watchObservedRunningTime="2025-12-04 17:46:16.575931446 +0000 UTC m=+1106.608173454" Dec 04 17:46:21 crc kubenswrapper[4631]: I1204 17:46:21.938189 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-66bcc8f984-lzslx" Dec 04 17:46:36 crc kubenswrapper[4631]: I1204 17:46:36.022732 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 17:46:36 crc kubenswrapper[4631]: I1204 17:46:36.024546 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 17:46:45 crc kubenswrapper[4631]: I1204 17:46:45.951946 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-9s4jp"] Dec 04 17:46:45 crc kubenswrapper[4631]: I1204 17:46:45.953362 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-9s4jp" Dec 04 17:46:45 crc kubenswrapper[4631]: I1204 17:46:45.957531 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-67cns"] Dec 04 17:46:45 crc kubenswrapper[4631]: I1204 17:46:45.957631 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-kkzfj" Dec 04 17:46:45 crc kubenswrapper[4631]: I1204 17:46:45.958657 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-67cns" Dec 04 17:46:45 crc kubenswrapper[4631]: I1204 17:46:45.961190 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-br9xc" Dec 04 17:46:45 crc kubenswrapper[4631]: I1204 17:46:45.974541 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-p6vp2"] Dec 04 17:46:45 crc kubenswrapper[4631]: I1204 17:46:45.975461 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-p6vp2" Dec 04 17:46:45 crc kubenswrapper[4631]: I1204 17:46:45.980027 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-ldclc"] Dec 04 17:46:45 crc kubenswrapper[4631]: I1204 17:46:45.983663 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-rjnxw" Dec 04 17:46:45 crc kubenswrapper[4631]: I1204 17:46:45.984605 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-ldclc" Dec 04 17:46:45 crc kubenswrapper[4631]: I1204 17:46:45.985867 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-d6cqv" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.022041 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-p6vp2"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.026012 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-9s4jp"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.041867 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-67cns"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.046126 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-ldclc"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.071386 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qkqk\" (UniqueName: \"kubernetes.io/projected/709a39e5-9fe0-4861-8761-774f26a4a315-kube-api-access-9qkqk\") pod \"cinder-operator-controller-manager-859b6ccc6-9s4jp\" (UID: \"709a39e5-9fe0-4861-8761-774f26a4a315\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-9s4jp" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.071442 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlrq8\" (UniqueName: \"kubernetes.io/projected/30be0340-cc50-4244-9b27-7e41f86bf113-kube-api-access-jlrq8\") pod \"barbican-operator-controller-manager-7d9dfd778-67cns\" (UID: \"30be0340-cc50-4244-9b27-7e41f86bf113\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-67cns" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.071514 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qfzw\" (UniqueName: \"kubernetes.io/projected/dd2fd0ee-2bee-4cd2-9c24-a0c0dce37b46-kube-api-access-2qfzw\") pod \"designate-operator-controller-manager-78b4bc895b-p6vp2\" (UID: \"dd2fd0ee-2bee-4cd2-9c24-a0c0dce37b46\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-p6vp2" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.123450 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jwf42"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.124677 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jwf42" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.134953 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-cz8hk"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.136039 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-cz8hk" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.136531 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-8wr58" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.137843 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-2bzwd"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.138630 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-2bzwd" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.142747 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-lj7z9" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.142914 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-rb8t4" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.146955 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-57k7z"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.148265 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-57k7z" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.151326 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.151585 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-rdsg9" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.151682 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jwf42"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.166841 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-cz8hk"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.173976 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlrq8\" (UniqueName: \"kubernetes.io/projected/30be0340-cc50-4244-9b27-7e41f86bf113-kube-api-access-jlrq8\") pod \"barbican-operator-controller-manager-7d9dfd778-67cns\" (UID: \"30be0340-cc50-4244-9b27-7e41f86bf113\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-67cns" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.174036 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6n5wc\" (UniqueName: \"kubernetes.io/projected/32886d79-72a7-4318-8098-718f0f55f61e-kube-api-access-6n5wc\") pod \"glance-operator-controller-manager-77987cd8cd-ldclc\" (UID: \"32886d79-72a7-4318-8098-718f0f55f61e\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-ldclc" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.174113 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qfzw\" (UniqueName: \"kubernetes.io/projected/dd2fd0ee-2bee-4cd2-9c24-a0c0dce37b46-kube-api-access-2qfzw\") pod 
\"designate-operator-controller-manager-78b4bc895b-p6vp2\" (UID: \"dd2fd0ee-2bee-4cd2-9c24-a0c0dce37b46\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-p6vp2" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.174178 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qkqk\" (UniqueName: \"kubernetes.io/projected/709a39e5-9fe0-4861-8761-774f26a4a315-kube-api-access-9qkqk\") pod \"cinder-operator-controller-manager-859b6ccc6-9s4jp\" (UID: \"709a39e5-9fe0-4861-8761-774f26a4a315\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-9s4jp" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.192448 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-57k7z"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.204773 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-xdm6x"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.206085 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xdm6x" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.212886 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-xbwg5" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.214221 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-sw9bk"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.215408 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sw9bk" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.219360 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-tmr9d"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.220051 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qfzw\" (UniqueName: \"kubernetes.io/projected/dd2fd0ee-2bee-4cd2-9c24-a0c0dce37b46-kube-api-access-2qfzw\") pod \"designate-operator-controller-manager-78b4bc895b-p6vp2\" (UID: \"dd2fd0ee-2bee-4cd2-9c24-a0c0dce37b46\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-p6vp2" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.220309 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-tmr9d" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.229796 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlrq8\" (UniqueName: \"kubernetes.io/projected/30be0340-cc50-4244-9b27-7e41f86bf113-kube-api-access-jlrq8\") pod \"barbican-operator-controller-manager-7d9dfd778-67cns\" (UID: \"30be0340-cc50-4244-9b27-7e41f86bf113\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-67cns" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.230392 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-rtb62" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.230554 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-g2t5l" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.234574 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-2bzwd"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.248003 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-xdm6x"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.259975 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qkqk\" (UniqueName: \"kubernetes.io/projected/709a39e5-9fe0-4861-8761-774f26a4a315-kube-api-access-9qkqk\") pod \"cinder-operator-controller-manager-859b6ccc6-9s4jp\" (UID: \"709a39e5-9fe0-4861-8761-774f26a4a315\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-9s4jp" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.263858 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2hbp2"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.264949 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2hbp2" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.273912 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-9s4jp" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.273948 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-49dt9" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.274818 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtcgk\" (UniqueName: \"kubernetes.io/projected/fdf92431-a279-4eb5-8e5d-56e353febcf2-kube-api-access-rtcgk\") pod \"infra-operator-controller-manager-57548d458d-57k7z\" (UID: \"fdf92431-a279-4eb5-8e5d-56e353febcf2\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-57k7z" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.274844 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrmdt\" (UniqueName: \"kubernetes.io/projected/5a715ee8-c048-4447-b3fc-5f94121c0e7e-kube-api-access-hrmdt\") pod \"horizon-operator-controller-manager-68c6d99b8f-cz8hk\" (UID: \"5a715ee8-c048-4447-b3fc-5f94121c0e7e\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-cz8hk" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.274874 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8z6l\" (UniqueName: \"kubernetes.io/projected/1d24a40a-06b2-43e4-9921-05dd2e8f27ea-kube-api-access-h8z6l\") pod \"heat-operator-controller-manager-5f64f6f8bb-jwf42\" (UID: \"1d24a40a-06b2-43e4-9921-05dd2e8f27ea\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jwf42" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.274900 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6n5wc\" (UniqueName: \"kubernetes.io/projected/32886d79-72a7-4318-8098-718f0f55f61e-kube-api-access-6n5wc\") pod \"glance-operator-controller-manager-77987cd8cd-ldclc\" (UID: \"32886d79-72a7-4318-8098-718f0f55f61e\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-ldclc" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.274922 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fdf92431-a279-4eb5-8e5d-56e353febcf2-cert\") pod \"infra-operator-controller-manager-57548d458d-57k7z\" (UID: \"fdf92431-a279-4eb5-8e5d-56e353febcf2\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-57k7z" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.274941 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6dfx\" (UniqueName: \"kubernetes.io/projected/b8212ac4-255e-4de2-ac13-0033682d7550-kube-api-access-m6dfx\") pod \"ironic-operator-controller-manager-6c548fd776-2bzwd\" (UID: \"b8212ac4-255e-4de2-ac13-0033682d7550\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-2bzwd" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.281706 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-czzvh"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.282698 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-czzvh" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.291577 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-lpv6f" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.292694 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-tmr9d"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.297658 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-67cns" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.307961 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-p6vp2" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.317460 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6n5wc\" (UniqueName: \"kubernetes.io/projected/32886d79-72a7-4318-8098-718f0f55f61e-kube-api-access-6n5wc\") pod \"glance-operator-controller-manager-77987cd8cd-ldclc\" (UID: \"32886d79-72a7-4318-8098-718f0f55f61e\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-ldclc" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.353827 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-czzvh"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.381997 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-928gw\" (UniqueName: \"kubernetes.io/projected/f0089345-8234-4ea7-9fbe-528afe9d5fc0-kube-api-access-928gw\") pod \"keystone-operator-controller-manager-7765d96ddf-xdm6x\" (UID: \"f0089345-8234-4ea7-9fbe-528afe9d5fc0\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xdm6x" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.382302 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzfdq\" (UniqueName: \"kubernetes.io/projected/41a5b9af-e0eb-46d8-84f0-0962dd72367c-kube-api-access-gzfdq\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-2hbp2\" (UID: \"41a5b9af-e0eb-46d8-84f0-0962dd72367c\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2hbp2" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.382407 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtcgk\" (UniqueName: \"kubernetes.io/projected/fdf92431-a279-4eb5-8e5d-56e353febcf2-kube-api-access-rtcgk\") pod \"infra-operator-controller-manager-57548d458d-57k7z\" (UID: \"fdf92431-a279-4eb5-8e5d-56e353febcf2\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-57k7z" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.382488 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrmdt\" (UniqueName: \"kubernetes.io/projected/5a715ee8-c048-4447-b3fc-5f94121c0e7e-kube-api-access-hrmdt\") pod \"horizon-operator-controller-manager-68c6d99b8f-cz8hk\" (UID: \"5a715ee8-c048-4447-b3fc-5f94121c0e7e\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-cz8hk" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.382594 4631 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8z6l\" (UniqueName: \"kubernetes.io/projected/1d24a40a-06b2-43e4-9921-05dd2e8f27ea-kube-api-access-h8z6l\") pod \"heat-operator-controller-manager-5f64f6f8bb-jwf42\" (UID: \"1d24a40a-06b2-43e4-9921-05dd2e8f27ea\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jwf42" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.382669 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkftb\" (UniqueName: \"kubernetes.io/projected/72f35a0d-fa67-44c7-a25c-b720885d5708-kube-api-access-kkftb\") pod \"manila-operator-controller-manager-7c79b5df47-sw9bk\" (UID: \"72f35a0d-fa67-44c7-a25c-b720885d5708\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sw9bk" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.382769 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fdf92431-a279-4eb5-8e5d-56e353febcf2-cert\") pod \"infra-operator-controller-manager-57548d458d-57k7z\" (UID: \"fdf92431-a279-4eb5-8e5d-56e353febcf2\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-57k7z" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.382850 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6dfx\" (UniqueName: \"kubernetes.io/projected/b8212ac4-255e-4de2-ac13-0033682d7550-kube-api-access-m6dfx\") pod \"ironic-operator-controller-manager-6c548fd776-2bzwd\" (UID: \"b8212ac4-255e-4de2-ac13-0033682d7550\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-2bzwd" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.382962 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blv7d\" (UniqueName: \"kubernetes.io/projected/76c2990b-dff1-4715-8517-28cff884cf12-kube-api-access-blv7d\") pod \"mariadb-operator-controller-manager-56bbcc9d85-tmr9d\" (UID: \"76c2990b-dff1-4715-8517-28cff884cf12\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-tmr9d" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.389331 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-sw9bk"] Dec 04 17:46:46 crc kubenswrapper[4631]: E1204 17:46:46.398546 4631 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 04 17:46:46 crc kubenswrapper[4631]: E1204 17:46:46.398815 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fdf92431-a279-4eb5-8e5d-56e353febcf2-cert podName:fdf92431-a279-4eb5-8e5d-56e353febcf2 nodeName:}" failed. No retries permitted until 2025-12-04 17:46:46.898793125 +0000 UTC m=+1136.931035133 (durationBeforeRetry 500ms). 
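The two E1204 records above show the kubelet's volume manager backing off after the failed cert mount: no retry is permitted for 500ms, and when the same mount fails again further down in this log the delay doubles to 1s. A minimal sketch of that doubling delay follows; the starting value matches the log, but the cap is an assumption for illustration, not a constant taken from kubelet source.

package main

import (
	"fmt"
	"time"
)

func main() {
	// Doubling retry delay as observed in the log: 500ms after the first
	// failure, 1s after the next. maxDelay is an assumed cap.
	delay := 500 * time.Millisecond
	const maxDelay = 2 * time.Minute
	for attempt := 1; attempt <= 6; attempt++ {
		fmt.Printf("attempt %d: retry permitted after %v\n", attempt, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}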
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fdf92431-a279-4eb5-8e5d-56e353febcf2-cert") pod "infra-operator-controller-manager-57548d458d-57k7z" (UID: "fdf92431-a279-4eb5-8e5d-56e353febcf2") : secret "infra-operator-webhook-server-cert" not found Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.420849 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-66x65"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.421835 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-66x65" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.422043 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8z6l\" (UniqueName: \"kubernetes.io/projected/1d24a40a-06b2-43e4-9921-05dd2e8f27ea-kube-api-access-h8z6l\") pod \"heat-operator-controller-manager-5f64f6f8bb-jwf42\" (UID: \"1d24a40a-06b2-43e4-9921-05dd2e8f27ea\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jwf42" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.429005 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2hbp2"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.429558 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-589dj" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.456676 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jwf42" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.467707 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtcgk\" (UniqueName: \"kubernetes.io/projected/fdf92431-a279-4eb5-8e5d-56e353febcf2-kube-api-access-rtcgk\") pod \"infra-operator-controller-manager-57548d458d-57k7z\" (UID: \"fdf92431-a279-4eb5-8e5d-56e353febcf2\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-57k7z" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.472592 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrmdt\" (UniqueName: \"kubernetes.io/projected/5a715ee8-c048-4447-b3fc-5f94121c0e7e-kube-api-access-hrmdt\") pod \"horizon-operator-controller-manager-68c6d99b8f-cz8hk\" (UID: \"5a715ee8-c048-4447-b3fc-5f94121c0e7e\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-cz8hk" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.473126 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6dfx\" (UniqueName: \"kubernetes.io/projected/b8212ac4-255e-4de2-ac13-0033682d7550-kube-api-access-m6dfx\") pod \"ironic-operator-controller-manager-6c548fd776-2bzwd\" (UID: \"b8212ac4-255e-4de2-ac13-0033682d7550\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-2bzwd" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.473779 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-66x65"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.488595 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-cz8hk" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.490555 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blv7d\" (UniqueName: \"kubernetes.io/projected/76c2990b-dff1-4715-8517-28cff884cf12-kube-api-access-blv7d\") pod \"mariadb-operator-controller-manager-56bbcc9d85-tmr9d\" (UID: \"76c2990b-dff1-4715-8517-28cff884cf12\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-tmr9d" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.490605 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgzwv\" (UniqueName: \"kubernetes.io/projected/becd7035-989e-497f-96ad-7eaa0d7e4456-kube-api-access-cgzwv\") pod \"octavia-operator-controller-manager-998648c74-66x65\" (UID: \"becd7035-989e-497f-96ad-7eaa0d7e4456\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-66x65" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.490627 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-928gw\" (UniqueName: \"kubernetes.io/projected/f0089345-8234-4ea7-9fbe-528afe9d5fc0-kube-api-access-928gw\") pod \"keystone-operator-controller-manager-7765d96ddf-xdm6x\" (UID: \"f0089345-8234-4ea7-9fbe-528afe9d5fc0\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xdm6x" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.490680 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzfdq\" (UniqueName: \"kubernetes.io/projected/41a5b9af-e0eb-46d8-84f0-0962dd72367c-kube-api-access-gzfdq\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-2hbp2\" (UID: \"41a5b9af-e0eb-46d8-84f0-0962dd72367c\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2hbp2" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.490714 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p9vbw\" (UniqueName: \"kubernetes.io/projected/22b6958b-a18a-49c1-b6a4-28b3ebad0846-kube-api-access-p9vbw\") pod \"nova-operator-controller-manager-697bc559fc-czzvh\" (UID: \"22b6958b-a18a-49c1-b6a4-28b3ebad0846\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-czzvh" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.490758 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkftb\" (UniqueName: \"kubernetes.io/projected/72f35a0d-fa67-44c7-a25c-b720885d5708-kube-api-access-kkftb\") pod \"manila-operator-controller-manager-7c79b5df47-sw9bk\" (UID: \"72f35a0d-fa67-44c7-a25c-b720885d5708\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sw9bk" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.517448 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-xc5m4"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.518364 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-2bzwd" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.518501 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xc5m4" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.519018 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkftb\" (UniqueName: \"kubernetes.io/projected/72f35a0d-fa67-44c7-a25c-b720885d5708-kube-api-access-kkftb\") pod \"manila-operator-controller-manager-7c79b5df47-sw9bk\" (UID: \"72f35a0d-fa67-44c7-a25c-b720885d5708\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sw9bk" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.527632 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-pjvjl" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.538484 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blv7d\" (UniqueName: \"kubernetes.io/projected/76c2990b-dff1-4715-8517-28cff884cf12-kube-api-access-blv7d\") pod \"mariadb-operator-controller-manager-56bbcc9d85-tmr9d\" (UID: \"76c2990b-dff1-4715-8517-28cff884cf12\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-tmr9d" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.539773 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.540789 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.542388 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-n7j4p" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.543359 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.549138 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzfdq\" (UniqueName: \"kubernetes.io/projected/41a5b9af-e0eb-46d8-84f0-0962dd72367c-kube-api-access-gzfdq\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-2hbp2\" (UID: \"41a5b9af-e0eb-46d8-84f0-0962dd72367c\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2hbp2" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.553906 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-928gw\" (UniqueName: \"kubernetes.io/projected/f0089345-8234-4ea7-9fbe-528afe9d5fc0-kube-api-access-928gw\") pod \"keystone-operator-controller-manager-7765d96ddf-xdm6x\" (UID: \"f0089345-8234-4ea7-9fbe-528afe9d5fc0\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xdm6x" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.575461 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-xc5m4"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.585897 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.592148 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" 
(UniqueName: \"kubernetes.io/secret/e20ca639-4732-4b27-b2e2-8d4cc9374515-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq\" (UID: \"e20ca639-4732-4b27-b2e2-8d4cc9374515\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.592189 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44vst\" (UniqueName: \"kubernetes.io/projected/e20ca639-4732-4b27-b2e2-8d4cc9374515-kube-api-access-44vst\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq\" (UID: \"e20ca639-4732-4b27-b2e2-8d4cc9374515\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.592221 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p9vbw\" (UniqueName: \"kubernetes.io/projected/22b6958b-a18a-49c1-b6a4-28b3ebad0846-kube-api-access-p9vbw\") pod \"nova-operator-controller-manager-697bc559fc-czzvh\" (UID: \"22b6958b-a18a-49c1-b6a4-28b3ebad0846\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-czzvh" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.592267 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bw2qx\" (UniqueName: \"kubernetes.io/projected/1c44bc20-c171-4476-a959-9e31d9bbac58-kube-api-access-bw2qx\") pod \"ovn-operator-controller-manager-b6456fdb6-xc5m4\" (UID: \"1c44bc20-c171-4476-a959-9e31d9bbac58\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xc5m4" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.592300 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgzwv\" (UniqueName: \"kubernetes.io/projected/becd7035-989e-497f-96ad-7eaa0d7e4456-kube-api-access-cgzwv\") pod \"octavia-operator-controller-manager-998648c74-66x65\" (UID: \"becd7035-989e-497f-96ad-7eaa0d7e4456\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-66x65" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.618869 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-ldclc" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.659478 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p9vbw\" (UniqueName: \"kubernetes.io/projected/22b6958b-a18a-49c1-b6a4-28b3ebad0846-kube-api-access-p9vbw\") pod \"nova-operator-controller-manager-697bc559fc-czzvh\" (UID: \"22b6958b-a18a-49c1-b6a4-28b3ebad0846\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-czzvh" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.671809 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-df7fm"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.675664 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-df7fm" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.692242 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xdm6x" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.693356 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sw9bk" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.694567 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e20ca639-4732-4b27-b2e2-8d4cc9374515-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq\" (UID: \"e20ca639-4732-4b27-b2e2-8d4cc9374515\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.694677 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44vst\" (UniqueName: \"kubernetes.io/projected/e20ca639-4732-4b27-b2e2-8d4cc9374515-kube-api-access-44vst\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq\" (UID: \"e20ca639-4732-4b27-b2e2-8d4cc9374515\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.694770 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jpwg\" (UniqueName: \"kubernetes.io/projected/7e9e43c6-516b-4195-9d65-e6e80544bb7d-kube-api-access-9jpwg\") pod \"placement-operator-controller-manager-78f8948974-df7fm\" (UID: \"7e9e43c6-516b-4195-9d65-e6e80544bb7d\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-df7fm" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.694813 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bw2qx\" (UniqueName: \"kubernetes.io/projected/1c44bc20-c171-4476-a959-9e31d9bbac58-kube-api-access-bw2qx\") pod \"ovn-operator-controller-manager-b6456fdb6-xc5m4\" (UID: \"1c44bc20-c171-4476-a959-9e31d9bbac58\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xc5m4" Dec 04 17:46:46 crc kubenswrapper[4631]: E1204 17:46:46.695219 4631 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 04 17:46:46 crc kubenswrapper[4631]: E1204 17:46:46.695262 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e20ca639-4732-4b27-b2e2-8d4cc9374515-cert podName:e20ca639-4732-4b27-b2e2-8d4cc9374515 nodeName:}" failed. No retries permitted until 2025-12-04 17:46:47.19524953 +0000 UTC m=+1137.227491518 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e20ca639-4732-4b27-b2e2-8d4cc9374515-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq" (UID: "e20ca639-4732-4b27-b2e2-8d4cc9374515") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.695699 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-t7n8l" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.698807 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgzwv\" (UniqueName: \"kubernetes.io/projected/becd7035-989e-497f-96ad-7eaa0d7e4456-kube-api-access-cgzwv\") pod \"octavia-operator-controller-manager-998648c74-66x65\" (UID: \"becd7035-989e-497f-96ad-7eaa0d7e4456\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-66x65" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.713041 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bw2qx\" (UniqueName: \"kubernetes.io/projected/1c44bc20-c171-4476-a959-9e31d9bbac58-kube-api-access-bw2qx\") pod \"ovn-operator-controller-manager-b6456fdb6-xc5m4\" (UID: \"1c44bc20-c171-4476-a959-9e31d9bbac58\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xc5m4" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.714601 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-tmr9d" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.726403 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-grlw8"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.728822 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-grlw8" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.734343 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2hbp2" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.738790 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44vst\" (UniqueName: \"kubernetes.io/projected/e20ca639-4732-4b27-b2e2-8d4cc9374515-kube-api-access-44vst\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq\" (UID: \"e20ca639-4732-4b27-b2e2-8d4cc9374515\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.754002 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-xzctw" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.758281 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-czzvh" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.801098 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-f5lq4"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.802889 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-f5lq4" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.808542 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-df7fm"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.819393 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fccvp\" (UniqueName: \"kubernetes.io/projected/acce4f1e-311d-44da-aaf9-a2cddc75be35-kube-api-access-fccvp\") pod \"swift-operator-controller-manager-5f8c65bbfc-grlw8\" (UID: \"acce4f1e-311d-44da-aaf9-a2cddc75be35\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-grlw8" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.819432 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jpwg\" (UniqueName: \"kubernetes.io/projected/7e9e43c6-516b-4195-9d65-e6e80544bb7d-kube-api-access-9jpwg\") pod \"placement-operator-controller-manager-78f8948974-df7fm\" (UID: \"7e9e43c6-516b-4195-9d65-e6e80544bb7d\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-df7fm" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.819464 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzzkx\" (UniqueName: \"kubernetes.io/projected/f05ded75-e10d-41ed-921d-0ba118f3453d-kube-api-access-jzzkx\") pod \"telemetry-operator-controller-manager-76cc84c6bb-f5lq4\" (UID: \"f05ded75-e10d-41ed-921d-0ba118f3453d\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-f5lq4" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.829508 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-66x65" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.842052 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-gv4c8" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.851655 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xc5m4" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.876880 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-grlw8"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.891849 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jpwg\" (UniqueName: \"kubernetes.io/projected/7e9e43c6-516b-4195-9d65-e6e80544bb7d-kube-api-access-9jpwg\") pod \"placement-operator-controller-manager-78f8948974-df7fm\" (UID: \"7e9e43c6-516b-4195-9d65-e6e80544bb7d\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-df7fm" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.906270 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-f5lq4"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.920446 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzzkx\" (UniqueName: \"kubernetes.io/projected/f05ded75-e10d-41ed-921d-0ba118f3453d-kube-api-access-jzzkx\") pod \"telemetry-operator-controller-manager-76cc84c6bb-f5lq4\" (UID: \"f05ded75-e10d-41ed-921d-0ba118f3453d\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-f5lq4" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.920589 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fccvp\" (UniqueName: \"kubernetes.io/projected/acce4f1e-311d-44da-aaf9-a2cddc75be35-kube-api-access-fccvp\") pod \"swift-operator-controller-manager-5f8c65bbfc-grlw8\" (UID: \"acce4f1e-311d-44da-aaf9-a2cddc75be35\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-grlw8" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.920619 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fdf92431-a279-4eb5-8e5d-56e353febcf2-cert\") pod \"infra-operator-controller-manager-57548d458d-57k7z\" (UID: \"fdf92431-a279-4eb5-8e5d-56e353febcf2\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-57k7z" Dec 04 17:46:46 crc kubenswrapper[4631]: E1204 17:46:46.920735 4631 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 04 17:46:46 crc kubenswrapper[4631]: E1204 17:46:46.920781 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fdf92431-a279-4eb5-8e5d-56e353febcf2-cert podName:fdf92431-a279-4eb5-8e5d-56e353febcf2 nodeName:}" failed. No retries permitted until 2025-12-04 17:46:47.920767603 +0000 UTC m=+1137.953009591 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fdf92431-a279-4eb5-8e5d-56e353febcf2-cert") pod "infra-operator-controller-manager-57548d458d-57k7z" (UID: "fdf92431-a279-4eb5-8e5d-56e353febcf2") : secret "infra-operator-webhook-server-cert" not found Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.923641 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-r28bs"] Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.930803 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-r28bs" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.938561 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-bd597" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.959599 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fccvp\" (UniqueName: \"kubernetes.io/projected/acce4f1e-311d-44da-aaf9-a2cddc75be35-kube-api-access-fccvp\") pod \"swift-operator-controller-manager-5f8c65bbfc-grlw8\" (UID: \"acce4f1e-311d-44da-aaf9-a2cddc75be35\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-grlw8" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.961883 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzzkx\" (UniqueName: \"kubernetes.io/projected/f05ded75-e10d-41ed-921d-0ba118f3453d-kube-api-access-jzzkx\") pod \"telemetry-operator-controller-manager-76cc84c6bb-f5lq4\" (UID: \"f05ded75-e10d-41ed-921d-0ba118f3453d\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-f5lq4" Dec 04 17:46:46 crc kubenswrapper[4631]: I1204 17:46:46.972697 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-r28bs"] Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.016891 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-df7fm" Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.023028 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-bqs5m"] Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.024424 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-bqs5m" Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.040558 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-c5ftd" Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.053980 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-bqs5m"] Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.060648 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-grlw8" Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.124240 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnqhv\" (UniqueName: \"kubernetes.io/projected/a19a11a8-a149-4b75-ab68-359723dcfbcb-kube-api-access-hnqhv\") pod \"test-operator-controller-manager-5854674fcc-r28bs\" (UID: \"a19a11a8-a149-4b75-ab68-359723dcfbcb\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-r28bs" Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.125201 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpddb\" (UniqueName: \"kubernetes.io/projected/7cf50b74-b958-4f66-aefc-2ad897abdec2-kube-api-access-kpddb\") pod \"watcher-operator-controller-manager-769dc69bc-bqs5m\" (UID: \"7cf50b74-b958-4f66-aefc-2ad897abdec2\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-bqs5m" Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.154058 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh"] Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.155168 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh" Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.162917 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.163123 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-stdtw" Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.163219 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.169813 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-f5lq4" Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.224993 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh"] Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.226445 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpddb\" (UniqueName: \"kubernetes.io/projected/7cf50b74-b958-4f66-aefc-2ad897abdec2-kube-api-access-kpddb\") pod \"watcher-operator-controller-manager-769dc69bc-bqs5m\" (UID: \"7cf50b74-b958-4f66-aefc-2ad897abdec2\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-bqs5m" Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.226528 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e20ca639-4732-4b27-b2e2-8d4cc9374515-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq\" (UID: \"e20ca639-4732-4b27-b2e2-8d4cc9374515\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq" Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.226585 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnqhv\" (UniqueName: \"kubernetes.io/projected/a19a11a8-a149-4b75-ab68-359723dcfbcb-kube-api-access-hnqhv\") pod \"test-operator-controller-manager-5854674fcc-r28bs\" (UID: \"a19a11a8-a149-4b75-ab68-359723dcfbcb\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-r28bs" Dec 04 17:46:47 crc kubenswrapper[4631]: E1204 17:46:47.227001 4631 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 04 17:46:47 crc kubenswrapper[4631]: E1204 17:46:47.227038 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e20ca639-4732-4b27-b2e2-8d4cc9374515-cert podName:e20ca639-4732-4b27-b2e2-8d4cc9374515 nodeName:}" failed. No retries permitted until 2025-12-04 17:46:48.227026381 +0000 UTC m=+1138.259268369 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e20ca639-4732-4b27-b2e2-8d4cc9374515-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq" (UID: "e20ca639-4732-4b27-b2e2-8d4cc9374515") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.265482 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnqhv\" (UniqueName: \"kubernetes.io/projected/a19a11a8-a149-4b75-ab68-359723dcfbcb-kube-api-access-hnqhv\") pod \"test-operator-controller-manager-5854674fcc-r28bs\" (UID: \"a19a11a8-a149-4b75-ab68-359723dcfbcb\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-r28bs"
Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.273840 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpddb\" (UniqueName: \"kubernetes.io/projected/7cf50b74-b958-4f66-aefc-2ad897abdec2-kube-api-access-kpddb\") pod \"watcher-operator-controller-manager-769dc69bc-bqs5m\" (UID: \"7cf50b74-b958-4f66-aefc-2ad897abdec2\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-bqs5m"
Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.279357 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-r28bs"
Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.304447 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ct9z5"]
Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.305396 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ct9z5"]
Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.305488 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ct9z5"
Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.310176 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-jkp7t"
Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.332013 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkjd4\" (UniqueName: \"kubernetes.io/projected/8eceb916-5479-43f0-a3f4-75d0643adcab-kube-api-access-lkjd4\") pod \"openstack-operator-controller-manager-f65bcfbd6-zphvh\" (UID: \"8eceb916-5479-43f0-a3f4-75d0643adcab\") " pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh"
Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.332425 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-webhook-certs\") pod \"openstack-operator-controller-manager-f65bcfbd6-zphvh\" (UID: \"8eceb916-5479-43f0-a3f4-75d0643adcab\") " pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh"
Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.332470 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-metrics-certs\") pod \"openstack-operator-controller-manager-f65bcfbd6-zphvh\" (UID: \"8eceb916-5479-43f0-a3f4-75d0643adcab\") " pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh"
Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.332579 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-67cns"]
Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.405437 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-p6vp2"]
Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.434448 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-metrics-certs\") pod \"openstack-operator-controller-manager-f65bcfbd6-zphvh\" (UID: \"8eceb916-5479-43f0-a3f4-75d0643adcab\") " pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh"
Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.434528 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvhc4\" (UniqueName: \"kubernetes.io/projected/8c4e7e86-5efa-4888-a717-2dcafc489144-kube-api-access-vvhc4\") pod \"rabbitmq-cluster-operator-manager-668c99d594-ct9z5\" (UID: \"8c4e7e86-5efa-4888-a717-2dcafc489144\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ct9z5"
Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.434579 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkjd4\" (UniqueName: \"kubernetes.io/projected/8eceb916-5479-43f0-a3f4-75d0643adcab-kube-api-access-lkjd4\") pod \"openstack-operator-controller-manager-f65bcfbd6-zphvh\" (UID: \"8eceb916-5479-43f0-a3f4-75d0643adcab\") " pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh"
Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.434620 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-webhook-certs\") pod \"openstack-operator-controller-manager-f65bcfbd6-zphvh\" (UID: \"8eceb916-5479-43f0-a3f4-75d0643adcab\") " pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh"
Dec 04 17:46:47 crc kubenswrapper[4631]: E1204 17:46:47.434744 4631 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 04 17:46:47 crc kubenswrapper[4631]: E1204 17:46:47.434805 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-webhook-certs podName:8eceb916-5479-43f0-a3f4-75d0643adcab nodeName:}" failed. No retries permitted until 2025-12-04 17:46:47.934790403 +0000 UTC m=+1137.967032401 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-webhook-certs") pod "openstack-operator-controller-manager-f65bcfbd6-zphvh" (UID: "8eceb916-5479-43f0-a3f4-75d0643adcab") : secret "webhook-server-cert" not found
Dec 04 17:46:47 crc kubenswrapper[4631]: E1204 17:46:47.434848 4631 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 04 17:46:47 crc kubenswrapper[4631]: E1204 17:46:47.434868 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-metrics-certs podName:8eceb916-5479-43f0-a3f4-75d0643adcab nodeName:}" failed. No retries permitted until 2025-12-04 17:46:47.934862015 +0000 UTC m=+1137.967104013 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-metrics-certs") pod "openstack-operator-controller-manager-f65bcfbd6-zphvh" (UID: "8eceb916-5479-43f0-a3f4-75d0643adcab") : secret "metrics-server-cert" not found
Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.479491 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-bqs5m"
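Every MountVolume.SetUp failure above has the same shape: the pod references a Secret volume (webhook-server-cert, metrics-server-cert) that does not yet exist in the openstack-operators namespace, so the kubelet cannot materialize the mount and schedules a retry. In this run the Secrets appear a few seconds later and the mounts succeed on retry (see 17:47:02-17:47:03 below). If they never appeared, a Secret of the expected shape could be created by hand; a minimal client-go sketch, assuming a local kubeconfig and placeholder tls.crt/tls.key files (only the Secret name and namespace are taken from the log):

package main

import (
	"context"
	"fmt"
	"os"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Build a client from $KUBECONFIG (assumed path, not from the log).
	cfg, err := clientcmd.BuildConfigFromFlags("", os.Getenv("KUBECONFIG"))
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}

	// Placeholder PEM material; in this deployment the operator bundle's
	// certificate machinery normally publishes the Secret itself.
	crt, err := os.ReadFile("tls.crt")
	if err != nil {
		panic(err)
	}
	key, err := os.ReadFile("tls.key")
	if err != nil {
		panic(err)
	}

	secret := &corev1.Secret{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "webhook-server-cert", // the name the mount above is waiting for
			Namespace: "openstack-operators",
		},
		Type: corev1.SecretTypeTLS,
		Data: map[string][]byte{"tls.crt": crt, "tls.key": key},
	}
	if _, err := client.CoreV1().Secrets(secret.Namespace).Create(context.Background(), secret, metav1.CreateOptions{}); err != nil {
		panic(err)
	}
	fmt.Println("secret created; the kubelet's next mount retry should succeed")
}

Once the Secret exists, the next retry mounts it without any pod restart, which is exactly the "MountVolume.SetUp succeeded" pattern visible later in this log.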
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-bqs5m" Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.491504 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkjd4\" (UniqueName: \"kubernetes.io/projected/8eceb916-5479-43f0-a3f4-75d0643adcab-kube-api-access-lkjd4\") pod \"openstack-operator-controller-manager-f65bcfbd6-zphvh\" (UID: \"8eceb916-5479-43f0-a3f4-75d0643adcab\") " pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh" Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.537099 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvhc4\" (UniqueName: \"kubernetes.io/projected/8c4e7e86-5efa-4888-a717-2dcafc489144-kube-api-access-vvhc4\") pod \"rabbitmq-cluster-operator-manager-668c99d594-ct9z5\" (UID: \"8c4e7e86-5efa-4888-a717-2dcafc489144\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ct9z5" Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.543440 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-9s4jp"] Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.570323 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvhc4\" (UniqueName: \"kubernetes.io/projected/8c4e7e86-5efa-4888-a717-2dcafc489144-kube-api-access-vvhc4\") pod \"rabbitmq-cluster-operator-manager-668c99d594-ct9z5\" (UID: \"8c4e7e86-5efa-4888-a717-2dcafc489144\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ct9z5" Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.689044 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ct9z5" Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.762185 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-p6vp2" event={"ID":"dd2fd0ee-2bee-4cd2-9c24-a0c0dce37b46","Type":"ContainerStarted","Data":"85aae70b61d33668f784632a39013a31b72269d34d62b8c57ad438eb6e0895e0"} Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.770793 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-9s4jp" event={"ID":"709a39e5-9fe0-4861-8761-774f26a4a315","Type":"ContainerStarted","Data":"23021ffa88aed933888d54cbf523258ea28303bfc795675a9e92dbb66ec51be5"} Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.781755 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-67cns" event={"ID":"30be0340-cc50-4244-9b27-7e41f86bf113","Type":"ContainerStarted","Data":"7f855a1ee16607736f16aad4d76ad58ca965584bfef3382acefaccdc3c53b380"} Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.862420 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jwf42"] Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.873826 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-xdm6x"] Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.886029 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-cz8hk"] Dec 04 17:46:47 crc 
kubenswrapper[4631]: W1204 17:46:47.897456 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf0089345_8234_4ea7_9fbe_528afe9d5fc0.slice/crio-c7a2027f6470401dd71e14d9719d7ecf18fd4ea38df4d01ee2b0eb841b23b3a6 WatchSource:0}: Error finding container c7a2027f6470401dd71e14d9719d7ecf18fd4ea38df4d01ee2b0eb841b23b3a6: Status 404 returned error can't find the container with id c7a2027f6470401dd71e14d9719d7ecf18fd4ea38df4d01ee2b0eb841b23b3a6 Dec 04 17:46:47 crc kubenswrapper[4631]: W1204 17:46:47.898248 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5a715ee8_c048_4447_b3fc_5f94121c0e7e.slice/crio-5a2cc8dec8a86d213588277c21e4266723f5af20e0349ca967ba4cb4d732fd50 WatchSource:0}: Error finding container 5a2cc8dec8a86d213588277c21e4266723f5af20e0349ca967ba4cb4d732fd50: Status 404 returned error can't find the container with id 5a2cc8dec8a86d213588277c21e4266723f5af20e0349ca967ba4cb4d732fd50 Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.943485 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-metrics-certs\") pod \"openstack-operator-controller-manager-f65bcfbd6-zphvh\" (UID: \"8eceb916-5479-43f0-a3f4-75d0643adcab\") " pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh" Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.943598 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fdf92431-a279-4eb5-8e5d-56e353febcf2-cert\") pod \"infra-operator-controller-manager-57548d458d-57k7z\" (UID: \"fdf92431-a279-4eb5-8e5d-56e353febcf2\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-57k7z" Dec 04 17:46:47 crc kubenswrapper[4631]: I1204 17:46:47.943622 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-webhook-certs\") pod \"openstack-operator-controller-manager-f65bcfbd6-zphvh\" (UID: \"8eceb916-5479-43f0-a3f4-75d0643adcab\") " pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh" Dec 04 17:46:47 crc kubenswrapper[4631]: E1204 17:46:47.943752 4631 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 04 17:46:47 crc kubenswrapper[4631]: E1204 17:46:47.943800 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-webhook-certs podName:8eceb916-5479-43f0-a3f4-75d0643adcab nodeName:}" failed. No retries permitted until 2025-12-04 17:46:48.943785898 +0000 UTC m=+1138.976027896 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-webhook-certs") pod "openstack-operator-controller-manager-f65bcfbd6-zphvh" (UID: "8eceb916-5479-43f0-a3f4-75d0643adcab") : secret "webhook-server-cert" not found Dec 04 17:46:47 crc kubenswrapper[4631]: E1204 17:46:47.944960 4631 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 04 17:46:47 crc kubenswrapper[4631]: E1204 17:46:47.944993 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-metrics-certs podName:8eceb916-5479-43f0-a3f4-75d0643adcab nodeName:}" failed. No retries permitted until 2025-12-04 17:46:48.944984943 +0000 UTC m=+1138.977226941 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-metrics-certs") pod "openstack-operator-controller-manager-f65bcfbd6-zphvh" (UID: "8eceb916-5479-43f0-a3f4-75d0643adcab") : secret "metrics-server-cert" not found Dec 04 17:46:47 crc kubenswrapper[4631]: E1204 17:46:47.945029 4631 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 04 17:46:47 crc kubenswrapper[4631]: E1204 17:46:47.945047 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fdf92431-a279-4eb5-8e5d-56e353febcf2-cert podName:fdf92431-a279-4eb5-8e5d-56e353febcf2 nodeName:}" failed. No retries permitted until 2025-12-04 17:46:49.945041375 +0000 UTC m=+1139.977283373 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fdf92431-a279-4eb5-8e5d-56e353febcf2-cert") pod "infra-operator-controller-manager-57548d458d-57k7z" (UID: "fdf92431-a279-4eb5-8e5d-56e353febcf2") : secret "infra-operator-webhook-server-cert" not found Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.092768 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2hbp2"] Dec 04 17:46:48 crc kubenswrapper[4631]: W1204 17:46:48.093520 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod41a5b9af_e0eb_46d8_84f0_0962dd72367c.slice/crio-54dfe8470c2a3e054bebc0040156fab247bcb4d905ca91757da2cd6cc861a5bd WatchSource:0}: Error finding container 54dfe8470c2a3e054bebc0040156fab247bcb4d905ca91757da2cd6cc861a5bd: Status 404 returned error can't find the container with id 54dfe8470c2a3e054bebc0040156fab247bcb4d905ca91757da2cd6cc861a5bd Dec 04 17:46:48 crc kubenswrapper[4631]: W1204 17:46:48.212794 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb8212ac4_255e_4de2_ac13_0033682d7550.slice/crio-71b7b99670d96300d3210978a78a4927badb865457c3a1518531bed3dbc591eb WatchSource:0}: Error finding container 71b7b99670d96300d3210978a78a4927badb865457c3a1518531bed3dbc591eb: Status 404 returned error can't find the container with id 71b7b99670d96300d3210978a78a4927badb865457c3a1518531bed3dbc591eb Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.215166 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-2bzwd"] Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.253590 4631 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e20ca639-4732-4b27-b2e2-8d4cc9374515-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq\" (UID: \"e20ca639-4732-4b27-b2e2-8d4cc9374515\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq" Dec 04 17:46:48 crc kubenswrapper[4631]: E1204 17:46:48.253844 4631 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 04 17:46:48 crc kubenswrapper[4631]: E1204 17:46:48.253899 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e20ca639-4732-4b27-b2e2-8d4cc9374515-cert podName:e20ca639-4732-4b27-b2e2-8d4cc9374515 nodeName:}" failed. No retries permitted until 2025-12-04 17:46:50.253882027 +0000 UTC m=+1140.286124035 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e20ca639-4732-4b27-b2e2-8d4cc9374515-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq" (UID: "e20ca639-4732-4b27-b2e2-8d4cc9374515") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.303892 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-tmr9d"] Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.303928 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-ldclc"] Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.370769 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-czzvh"] Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.382351 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-df7fm"] Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.387116 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-r28bs"] Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.456300 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-sw9bk"] Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.474767 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-xc5m4"] Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.484587 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ct9z5"] Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.500455 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-grlw8"] Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.514485 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-bqs5m"] Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.522392 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-66x65"] Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.525671 4631 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-f5lq4"] Dec 04 17:46:48 crc kubenswrapper[4631]: E1204 17:46:48.533680 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fccvp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-grlw8_openstack-operators(acce4f1e-311d-44da-aaf9-a2cddc75be35): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 04 17:46:48 crc kubenswrapper[4631]: E1204 17:46:48.537961 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fccvp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-grlw8_openstack-operators(acce4f1e-311d-44da-aaf9-a2cddc75be35): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 04 17:46:48 crc kubenswrapper[4631]: E1204 17:46:48.540057 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-grlw8" podUID="acce4f1e-311d-44da-aaf9-a2cddc75be35" Dec 04 17:46:48 crc kubenswrapper[4631]: W1204 17:46:48.555274 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8c4e7e86_5efa_4888_a717_2dcafc489144.slice/crio-73ead9b30b3f038e8e054ba7bac29c95c5f817c7cbc700d0570204785639dc7c WatchSource:0}: Error finding container 73ead9b30b3f038e8e054ba7bac29c95c5f817c7cbc700d0570204785639dc7c: Status 404 returned error can't find the container with id 73ead9b30b3f038e8e054ba7bac29c95c5f817c7cbc700d0570204785639dc7c Dec 04 17:46:48 crc kubenswrapper[4631]: E1204 17:46:48.580459 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vvhc4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-ct9z5_openstack-operators(8c4e7e86-5efa-4888-a717-2dcafc489144): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 04 17:46:48 crc kubenswrapper[4631]: E1204 17:46:48.580562 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jzzkx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-f5lq4_openstack-operators(f05ded75-e10d-41ed-921d-0ba118f3453d): 
ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 04 17:46:48 crc kubenswrapper[4631]: E1204 17:46:48.580643 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cgzwv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-66x65_openstack-operators(becd7035-989e-497f-96ad-7eaa0d7e4456): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 04 17:46:48 crc kubenswrapper[4631]: E1204 17:46:48.581750 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ct9z5" podUID="8c4e7e86-5efa-4888-a717-2dcafc489144" Dec 04 17:46:48 crc kubenswrapper[4631]: E1204 17:46:48.583239 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} 
BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jzzkx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-f5lq4_openstack-operators(f05ded75-e10d-41ed-921d-0ba118f3453d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 04 17:46:48 crc kubenswrapper[4631]: E1204 17:46:48.583308 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cgzwv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-66x65_openstack-operators(becd7035-989e-497f-96ad-7eaa0d7e4456): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 04 17:46:48 crc kubenswrapper[4631]: E1204 17:46:48.585000 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-66x65" podUID="becd7035-989e-497f-96ad-7eaa0d7e4456" Dec 04 17:46:48 crc kubenswrapper[4631]: E1204 17:46:48.585413 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for 
\"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-f5lq4" podUID="f05ded75-e10d-41ed-921d-0ba118f3453d" Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.798555 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-df7fm" event={"ID":"7e9e43c6-516b-4195-9d65-e6e80544bb7d","Type":"ContainerStarted","Data":"94d446ba9092f0989957f2b359eb3476972d4b31b77f381a233e84b2cb1aa81e"} Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.803082 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-tmr9d" event={"ID":"76c2990b-dff1-4715-8517-28cff884cf12","Type":"ContainerStarted","Data":"670be90c6c42933b77f2e56901ede07b874befd19eba76ff381e38d8eec98cf0"} Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.806186 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sw9bk" event={"ID":"72f35a0d-fa67-44c7-a25c-b720885d5708","Type":"ContainerStarted","Data":"d4b76de9ad085f80894ca21095008c068b005221e5d87884164c685fc96f59df"} Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.819056 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ct9z5" event={"ID":"8c4e7e86-5efa-4888-a717-2dcafc489144","Type":"ContainerStarted","Data":"73ead9b30b3f038e8e054ba7bac29c95c5f817c7cbc700d0570204785639dc7c"} Dec 04 17:46:48 crc kubenswrapper[4631]: E1204 17:46:48.821004 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ct9z5" podUID="8c4e7e86-5efa-4888-a717-2dcafc489144" Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.822929 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xc5m4" event={"ID":"1c44bc20-c171-4476-a959-9e31d9bbac58","Type":"ContainerStarted","Data":"5ae56c586233af820fe2bb86bb69c9592aaea499da1b829caa33dac470faecd6"} Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.835349 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-czzvh" event={"ID":"22b6958b-a18a-49c1-b6a4-28b3ebad0846","Type":"ContainerStarted","Data":"92759d6e6f74e0ebbd41df7267c23c21e66e5f5d60a0fb08035e6d2bb8939f47"} Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.841150 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-ldclc" event={"ID":"32886d79-72a7-4318-8098-718f0f55f61e","Type":"ContainerStarted","Data":"73d54aee1da61f90dc714889660010f3f0261af7e07912ddfece04d6163c526e"} Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.849651 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jwf42" event={"ID":"1d24a40a-06b2-43e4-9921-05dd2e8f27ea","Type":"ContainerStarted","Data":"5114cf7f0f22caa43cd5aa2f2423bd18eb1ff68ad2a14712b1b87b76e984facb"} Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.868827 4631 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-bqs5m" event={"ID":"7cf50b74-b958-4f66-aefc-2ad897abdec2","Type":"ContainerStarted","Data":"c04790372796b6574b8072b46d5c7a5b4b57d4c1209b8d6e6bd8c0fd0d7a113c"} Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.871771 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-cz8hk" event={"ID":"5a715ee8-c048-4447-b3fc-5f94121c0e7e","Type":"ContainerStarted","Data":"5a2cc8dec8a86d213588277c21e4266723f5af20e0349ca967ba4cb4d732fd50"} Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.877729 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xdm6x" event={"ID":"f0089345-8234-4ea7-9fbe-528afe9d5fc0","Type":"ContainerStarted","Data":"c7a2027f6470401dd71e14d9719d7ecf18fd4ea38df4d01ee2b0eb841b23b3a6"} Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.879025 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-r28bs" event={"ID":"a19a11a8-a149-4b75-ab68-359723dcfbcb","Type":"ContainerStarted","Data":"c93a45f7b5522743a6f85503dfb6f447adbdd1dd4275e70199b4e97ca722c601"} Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.890808 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-2bzwd" event={"ID":"b8212ac4-255e-4de2-ac13-0033682d7550","Type":"ContainerStarted","Data":"71b7b99670d96300d3210978a78a4927badb865457c3a1518531bed3dbc591eb"} Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.921875 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-f5lq4" event={"ID":"f05ded75-e10d-41ed-921d-0ba118f3453d","Type":"ContainerStarted","Data":"a1e7c3310bb67f896b471fb9caa0999d08e0a0a5478a92ccea340eb1d1571a5e"} Dec 04 17:46:48 crc kubenswrapper[4631]: E1204 17:46:48.926078 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-f5lq4" podUID="f05ded75-e10d-41ed-921d-0ba118f3453d" Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.931876 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2hbp2" event={"ID":"41a5b9af-e0eb-46d8-84f0-0962dd72367c","Type":"ContainerStarted","Data":"54dfe8470c2a3e054bebc0040156fab247bcb4d905ca91757da2cd6cc861a5bd"} Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.935880 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-66x65" event={"ID":"becd7035-989e-497f-96ad-7eaa0d7e4456","Type":"ContainerStarted","Data":"2f7c46a5be39bc7cad9030a12ae9462f1f58b2c8f893489f7acd390b36ab61ca"} Dec 04 17:46:48 crc kubenswrapper[4631]: E1204 17:46:48.978875 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-66x65" podUID="becd7035-989e-497f-96ad-7eaa0d7e4456" Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.979735 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-metrics-certs\") pod \"openstack-operator-controller-manager-f65bcfbd6-zphvh\" (UID: \"8eceb916-5479-43f0-a3f4-75d0643adcab\") " pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh" Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.979885 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-webhook-certs\") pod \"openstack-operator-controller-manager-f65bcfbd6-zphvh\" (UID: \"8eceb916-5479-43f0-a3f4-75d0643adcab\") " pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh" Dec 04 17:46:48 crc kubenswrapper[4631]: E1204 17:46:48.980024 4631 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 04 17:46:48 crc kubenswrapper[4631]: E1204 17:46:48.980081 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-webhook-certs podName:8eceb916-5479-43f0-a3f4-75d0643adcab nodeName:}" failed. No retries permitted until 2025-12-04 17:46:50.980054945 +0000 UTC m=+1141.012296943 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-webhook-certs") pod "openstack-operator-controller-manager-f65bcfbd6-zphvh" (UID: "8eceb916-5479-43f0-a3f4-75d0643adcab") : secret "webhook-server-cert" not found Dec 04 17:46:48 crc kubenswrapper[4631]: E1204 17:46:48.980131 4631 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 04 17:46:48 crc kubenswrapper[4631]: E1204 17:46:48.980156 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-metrics-certs podName:8eceb916-5479-43f0-a3f4-75d0643adcab nodeName:}" failed. No retries permitted until 2025-12-04 17:46:50.980148158 +0000 UTC m=+1141.012390156 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-metrics-certs") pod "openstack-operator-controller-manager-f65bcfbd6-zphvh" (UID: "8eceb916-5479-43f0-a3f4-75d0643adcab") : secret "metrics-server-cert" not found Dec 04 17:46:48 crc kubenswrapper[4631]: I1204 17:46:48.987806 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-grlw8" event={"ID":"acce4f1e-311d-44da-aaf9-a2cddc75be35","Type":"ContainerStarted","Data":"ec27296a97d4ce4b609a426ec55c366e800038e583a3fefa63fa3853dc83645c"} Dec 04 17:46:48 crc kubenswrapper[4631]: E1204 17:46:48.990710 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-grlw8" podUID="acce4f1e-311d-44da-aaf9-a2cddc75be35" Dec 04 17:46:50 crc kubenswrapper[4631]: E1204 17:46:50.004226 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ct9z5" podUID="8c4e7e86-5efa-4888-a717-2dcafc489144" Dec 04 17:46:50 crc kubenswrapper[4631]: E1204 17:46:50.004796 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-f5lq4" podUID="f05ded75-e10d-41ed-921d-0ba118f3453d" Dec 04 17:46:50 crc kubenswrapper[4631]: E1204 17:46:50.007771 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-66x65" podUID="becd7035-989e-497f-96ad-7eaa0d7e4456" Dec 04 17:46:50 crc kubenswrapper[4631]: E1204 17:46:50.007893 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-grlw8" podUID="acce4f1e-311d-44da-aaf9-a2cddc75be35" Dec 04 17:46:50 crc kubenswrapper[4631]: I1204 17:46:50.014513 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fdf92431-a279-4eb5-8e5d-56e353febcf2-cert\") pod \"infra-operator-controller-manager-57548d458d-57k7z\" (UID: \"fdf92431-a279-4eb5-8e5d-56e353febcf2\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-57k7z" Dec 04 17:46:50 crc kubenswrapper[4631]: E1204 17:46:50.014731 4631 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 04 17:46:50 crc kubenswrapper[4631]: E1204 17:46:50.014782 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fdf92431-a279-4eb5-8e5d-56e353febcf2-cert podName:fdf92431-a279-4eb5-8e5d-56e353febcf2 nodeName:}" failed. No retries permitted until 2025-12-04 17:46:54.014769966 +0000 UTC m=+1144.047011964 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fdf92431-a279-4eb5-8e5d-56e353febcf2-cert") pod "infra-operator-controller-manager-57548d458d-57k7z" (UID: "fdf92431-a279-4eb5-8e5d-56e353febcf2") : secret "infra-operator-webhook-server-cert" not found Dec 04 17:46:50 crc kubenswrapper[4631]: I1204 17:46:50.323722 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e20ca639-4732-4b27-b2e2-8d4cc9374515-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq\" (UID: \"e20ca639-4732-4b27-b2e2-8d4cc9374515\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq" Dec 04 17:46:50 crc kubenswrapper[4631]: E1204 17:46:50.323921 4631 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 04 17:46:50 crc kubenswrapper[4631]: E1204 17:46:50.323975 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e20ca639-4732-4b27-b2e2-8d4cc9374515-cert podName:e20ca639-4732-4b27-b2e2-8d4cc9374515 nodeName:}" failed. No retries permitted until 2025-12-04 17:46:54.323956478 +0000 UTC m=+1144.356198476 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e20ca639-4732-4b27-b2e2-8d4cc9374515-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq" (UID: "e20ca639-4732-4b27-b2e2-8d4cc9374515") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 04 17:46:51 crc kubenswrapper[4631]: I1204 17:46:51.058136 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-metrics-certs\") pod \"openstack-operator-controller-manager-f65bcfbd6-zphvh\" (UID: \"8eceb916-5479-43f0-a3f4-75d0643adcab\") " pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh" Dec 04 17:46:51 crc kubenswrapper[4631]: I1204 17:46:51.058253 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-webhook-certs\") pod \"openstack-operator-controller-manager-f65bcfbd6-zphvh\" (UID: \"8eceb916-5479-43f0-a3f4-75d0643adcab\") " pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh" Dec 04 17:46:51 crc kubenswrapper[4631]: E1204 17:46:51.058362 4631 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 04 17:46:51 crc kubenswrapper[4631]: E1204 17:46:51.058418 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-webhook-certs podName:8eceb916-5479-43f0-a3f4-75d0643adcab nodeName:}" failed. No retries permitted until 2025-12-04 17:46:55.058406024 +0000 UTC m=+1145.090648022 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-webhook-certs") pod "openstack-operator-controller-manager-f65bcfbd6-zphvh" (UID: "8eceb916-5479-43f0-a3f4-75d0643adcab") : secret "webhook-server-cert" not found Dec 04 17:46:51 crc kubenswrapper[4631]: E1204 17:46:51.058563 4631 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 04 17:46:51 crc kubenswrapper[4631]: E1204 17:46:51.058625 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-metrics-certs podName:8eceb916-5479-43f0-a3f4-75d0643adcab nodeName:}" failed. No retries permitted until 2025-12-04 17:46:55.05860448 +0000 UTC m=+1145.090846468 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-metrics-certs") pod "openstack-operator-controller-manager-f65bcfbd6-zphvh" (UID: "8eceb916-5479-43f0-a3f4-75d0643adcab") : secret "metrics-server-cert" not found Dec 04 17:46:54 crc kubenswrapper[4631]: I1204 17:46:54.077173 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fdf92431-a279-4eb5-8e5d-56e353febcf2-cert\") pod \"infra-operator-controller-manager-57548d458d-57k7z\" (UID: \"fdf92431-a279-4eb5-8e5d-56e353febcf2\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-57k7z" Dec 04 17:46:54 crc kubenswrapper[4631]: E1204 17:46:54.077717 4631 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 04 17:46:54 crc kubenswrapper[4631]: E1204 17:46:54.077763 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fdf92431-a279-4eb5-8e5d-56e353febcf2-cert podName:fdf92431-a279-4eb5-8e5d-56e353febcf2 nodeName:}" failed. No retries permitted until 2025-12-04 17:47:02.077750117 +0000 UTC m=+1152.109992115 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fdf92431-a279-4eb5-8e5d-56e353febcf2-cert") pod "infra-operator-controller-manager-57548d458d-57k7z" (UID: "fdf92431-a279-4eb5-8e5d-56e353febcf2") : secret "infra-operator-webhook-server-cert" not found Dec 04 17:46:54 crc kubenswrapper[4631]: I1204 17:46:54.381123 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e20ca639-4732-4b27-b2e2-8d4cc9374515-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq\" (UID: \"e20ca639-4732-4b27-b2e2-8d4cc9374515\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq" Dec 04 17:46:54 crc kubenswrapper[4631]: E1204 17:46:54.381294 4631 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 04 17:46:54 crc kubenswrapper[4631]: E1204 17:46:54.381387 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e20ca639-4732-4b27-b2e2-8d4cc9374515-cert podName:e20ca639-4732-4b27-b2e2-8d4cc9374515 nodeName:}" failed. No retries permitted until 2025-12-04 17:47:02.381352428 +0000 UTC m=+1152.413594426 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e20ca639-4732-4b27-b2e2-8d4cc9374515-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq" (UID: "e20ca639-4732-4b27-b2e2-8d4cc9374515") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 04 17:46:55 crc kubenswrapper[4631]: I1204 17:46:55.102549 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-metrics-certs\") pod \"openstack-operator-controller-manager-f65bcfbd6-zphvh\" (UID: \"8eceb916-5479-43f0-a3f4-75d0643adcab\") " pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh" Dec 04 17:46:55 crc kubenswrapper[4631]: I1204 17:46:55.102667 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-webhook-certs\") pod \"openstack-operator-controller-manager-f65bcfbd6-zphvh\" (UID: \"8eceb916-5479-43f0-a3f4-75d0643adcab\") " pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh" Dec 04 17:46:55 crc kubenswrapper[4631]: E1204 17:46:55.102782 4631 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 04 17:46:55 crc kubenswrapper[4631]: E1204 17:46:55.102827 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-webhook-certs podName:8eceb916-5479-43f0-a3f4-75d0643adcab nodeName:}" failed. No retries permitted until 2025-12-04 17:47:03.102814011 +0000 UTC m=+1153.135056009 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-webhook-certs") pod "openstack-operator-controller-manager-f65bcfbd6-zphvh" (UID: "8eceb916-5479-43f0-a3f4-75d0643adcab") : secret "webhook-server-cert" not found Dec 04 17:46:55 crc kubenswrapper[4631]: E1204 17:46:55.102879 4631 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 04 17:46:55 crc kubenswrapper[4631]: E1204 17:46:55.102900 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-metrics-certs podName:8eceb916-5479-43f0-a3f4-75d0643adcab nodeName:}" failed. No retries permitted until 2025-12-04 17:47:03.102892963 +0000 UTC m=+1153.135134961 (durationBeforeRetry 8s). 
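The durationBeforeRetry values in the mount errors trace the kubelet volume manager's exponential backoff exactly: 500ms, 1s, 2s, 4s, 8s, doubling per failed attempt and tracked per volume, so each cert volume walks the ladder on its own clock. The schedule is reproducible in a few lines (the roughly two-minute cap is an assumption from the upstream kubelet; this log never runs long enough to reach it):

package main

import (
	"fmt"
	"time"
)

func main() {
	// Doubling retry delay observed in the log: 500ms, 1s, 2s, 4s, 8s, ...
	delay := 500 * time.Millisecond
	maxDelay := 2*time.Minute + 2*time.Second // assumed upstream cap

	for attempt := 1; attempt <= 8; attempt++ {
		fmt.Printf("attempt %d: durationBeforeRetry %v\n", attempt, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}

The backoff resets as soon as an attempt succeeds, which is why the successful mounts below happen on the first retry after the Secrets finally appear.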
Dec 04 17:47:02 crc kubenswrapper[4631]: I1204 17:47:02.115592 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fdf92431-a279-4eb5-8e5d-56e353febcf2-cert\") pod \"infra-operator-controller-manager-57548d458d-57k7z\" (UID: \"fdf92431-a279-4eb5-8e5d-56e353febcf2\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-57k7z"
Dec 04 17:47:02 crc kubenswrapper[4631]: I1204 17:47:02.122020 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fdf92431-a279-4eb5-8e5d-56e353febcf2-cert\") pod \"infra-operator-controller-manager-57548d458d-57k7z\" (UID: \"fdf92431-a279-4eb5-8e5d-56e353febcf2\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-57k7z"
Dec 04 17:47:02 crc kubenswrapper[4631]: I1204 17:47:02.251801 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-rdsg9"
Dec 04 17:47:02 crc kubenswrapper[4631]: I1204 17:47:02.260958 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-57k7z"
Dec 04 17:47:02 crc kubenswrapper[4631]: I1204 17:47:02.420420 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e20ca639-4732-4b27-b2e2-8d4cc9374515-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq\" (UID: \"e20ca639-4732-4b27-b2e2-8d4cc9374515\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq"
Dec 04 17:47:02 crc kubenswrapper[4631]: I1204 17:47:02.431494 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e20ca639-4732-4b27-b2e2-8d4cc9374515-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq\" (UID: \"e20ca639-4732-4b27-b2e2-8d4cc9374515\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq"
Dec 04 17:47:02 crc kubenswrapper[4631]: I1204 17:47:02.489010 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-n7j4p"
Dec 04 17:47:02 crc kubenswrapper[4631]: I1204 17:47:02.497215 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq"
Dec 04 17:47:03 crc kubenswrapper[4631]: I1204 17:47:03.129513 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-webhook-certs\") pod \"openstack-operator-controller-manager-f65bcfbd6-zphvh\" (UID: \"8eceb916-5479-43f0-a3f4-75d0643adcab\") " pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh"
Dec 04 17:47:03 crc kubenswrapper[4631]: I1204 17:47:03.129599 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-metrics-certs\") pod \"openstack-operator-controller-manager-f65bcfbd6-zphvh\" (UID: \"8eceb916-5479-43f0-a3f4-75d0643adcab\") " pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh"
Dec 04 17:47:03 crc kubenswrapper[4631]: I1204 17:47:03.135986 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-metrics-certs\") pod \"openstack-operator-controller-manager-f65bcfbd6-zphvh\" (UID: \"8eceb916-5479-43f0-a3f4-75d0643adcab\") " pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh"
Dec 04 17:47:03 crc kubenswrapper[4631]: I1204 17:47:03.136006 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/8eceb916-5479-43f0-a3f4-75d0643adcab-webhook-certs\") pod \"openstack-operator-controller-manager-f65bcfbd6-zphvh\" (UID: \"8eceb916-5479-43f0-a3f4-75d0643adcab\") " pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh"
Dec 04 17:47:03 crc kubenswrapper[4631]: I1204 17:47:03.404119 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-stdtw"
Dec 04 17:47:03 crc kubenswrapper[4631]: I1204 17:47:03.412465 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh"
Dec 04 17:47:05 crc kubenswrapper[4631]: E1204 17:47:05.915047 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9"
Dec 04 17:47:05 crc kubenswrapper[4631]: E1204 17:47:05.915423 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kkftb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-7c79b5df47-sw9bk_openstack-operators(72f35a0d-fa67-44c7-a25c-b720885d5708): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 04 17:47:06 crc kubenswrapper[4631]: I1204 17:47:06.022845 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 17:47:06 crc kubenswrapper[4631]: I1204 17:47:06.023168 4631 prober.go:107] "Probe failed" probeType="Liveness"
pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 17:47:06 crc kubenswrapper[4631]: E1204 17:47:06.645419 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5" Dec 04 17:47:06 crc kubenswrapper[4631]: E1204 17:47:06.645628 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hrmdt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-cz8hk_openstack-operators(5a715ee8-c048-4447-b3fc-5f94121c0e7e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 17:47:08 crc kubenswrapper[4631]: E1204 17:47:08.320518 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94" Dec 04 17:47:08 crc kubenswrapper[4631]: E1204 
17:47:08.320761 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hnqhv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-r28bs_openstack-operators(a19a11a8-a149-4b75-ab68-359723dcfbcb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 17:47:10 crc kubenswrapper[4631]: E1204 17:47:10.816155 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429" Dec 04 17:47:10 crc kubenswrapper[4631]: E1204 17:47:10.816875 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-h8z6l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-jwf42_openstack-operators(1d24a40a-06b2-43e4-9921-05dd2e8f27ea): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 17:47:11 crc kubenswrapper[4631]: E1204 17:47:11.276059 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f" Dec 04 17:47:11 crc kubenswrapper[4631]: E1204 17:47:11.276240 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9jpwg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-df7fm_openstack-operators(7e9e43c6-516b-4195-9d65-e6e80544bb7d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 17:47:17 crc kubenswrapper[4631]: E1204 17:47:17.248477 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59" Dec 04 17:47:17 crc kubenswrapper[4631]: E1204 17:47:17.249049 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bw2qx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-xc5m4_openstack-operators(1c44bc20-c171-4476-a959-9e31d9bbac58): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 17:47:17 crc kubenswrapper[4631]: E1204 17:47:17.727215 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7" Dec 04 17:47:17 crc kubenswrapper[4631]: E1204 17:47:17.727483 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-blv7d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-tmr9d_openstack-operators(76c2990b-dff1-4715-8517-28cff884cf12): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 17:47:23 crc kubenswrapper[4631]: E1204 17:47:23.150281 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 04 17:47:23 crc kubenswrapper[4631]: E1204 17:47:23.151086 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-928gw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-xdm6x_openstack-operators(f0089345-8234-4ea7-9fbe-528afe9d5fc0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 17:47:23 crc kubenswrapper[4631]: E1204 17:47:23.726961 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Dec 04 17:47:23 crc kubenswrapper[4631]: E1204 17:47:23.727486 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-p9vbw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-czzvh_openstack-operators(22b6958b-a18a-49c1-b6a4-28b3ebad0846): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 17:47:24 crc kubenswrapper[4631]: I1204 17:47:24.216812 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh"] Dec 04 17:47:24 crc kubenswrapper[4631]: I1204 17:47:24.281260 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-57k7z"] Dec 04 17:47:24 crc kubenswrapper[4631]: I1204 17:47:24.295207 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2hbp2" event={"ID":"41a5b9af-e0eb-46d8-84f0-0962dd72367c","Type":"ContainerStarted","Data":"0e1b7e4c2a7416f8d6adf2ef2c2c97d6b4b5a6cfef7c8922942806ee61dc6b20"} Dec 04 17:47:24 crc kubenswrapper[4631]: I1204 17:47:24.325288 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq"] Dec 04 17:47:24 crc kubenswrapper[4631]: W1204 17:47:24.534400 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8eceb916_5479_43f0_a3f4_75d0643adcab.slice/crio-78412703cb0e636c613eba937c088ce40a40b385726804a8ae5673ac0281794d WatchSource:0}: Error finding container 78412703cb0e636c613eba937c088ce40a40b385726804a8ae5673ac0281794d: Status 404 returned error can't find the container with id 78412703cb0e636c613eba937c088ce40a40b385726804a8ae5673ac0281794d Dec 04 17:47:24 crc kubenswrapper[4631]: W1204 17:47:24.535895 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfdf92431_a279_4eb5_8e5d_56e353febcf2.slice/crio-7f8acc932a651a477052a2508149d2b60aa0ecaadd79ed8b33a971042dc2e79c WatchSource:0}: Error finding container 7f8acc932a651a477052a2508149d2b60aa0ecaadd79ed8b33a971042dc2e79c: Status 404 returned error can't find the container with id 7f8acc932a651a477052a2508149d2b60aa0ecaadd79ed8b33a971042dc2e79c Dec 04 17:47:25 crc kubenswrapper[4631]: I1204 17:47:25.365714 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-p6vp2" event={"ID":"dd2fd0ee-2bee-4cd2-9c24-a0c0dce37b46","Type":"ContainerStarted","Data":"d37399cd447b3e2a0006a8e3587cd74ff508d983239093896727f953e0fcc01c"} Dec 04 17:47:25 crc kubenswrapper[4631]: I1204 17:47:25.373518 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-bqs5m" event={"ID":"7cf50b74-b958-4f66-aefc-2ad897abdec2","Type":"ContainerStarted","Data":"f2cdd5604c4555b9d36811e6f26f8b360f75548f8730b9b7c5bcdef35e30528f"} Dec 04 17:47:25 crc kubenswrapper[4631]: I1204 17:47:25.384195 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-9s4jp" event={"ID":"709a39e5-9fe0-4861-8761-774f26a4a315","Type":"ContainerStarted","Data":"2989c69b71d9d304674a5ad526aac52c4cb83a0fcaa3c41102d92136b6061c36"} Dec 04 17:47:25 crc kubenswrapper[4631]: I1204 17:47:25.394659 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-2bzwd" event={"ID":"b8212ac4-255e-4de2-ac13-0033682d7550","Type":"ContainerStarted","Data":"297de3bb5e20ff84bd32b9c99851f23c689673ea7b8230ad857fd4c6c2e5e95e"} Dec 04 17:47:25 crc kubenswrapper[4631]: I1204 17:47:25.397135 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-f5lq4" event={"ID":"f05ded75-e10d-41ed-921d-0ba118f3453d","Type":"ContainerStarted","Data":"3a3e99ded96533f62219495de69cb5dccddefc321a4faac267258fb2a198aff3"} Dec 04 17:47:25 crc kubenswrapper[4631]: I1204 17:47:25.411310 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh" event={"ID":"8eceb916-5479-43f0-a3f4-75d0643adcab","Type":"ContainerStarted","Data":"78412703cb0e636c613eba937c088ce40a40b385726804a8ae5673ac0281794d"} Dec 04 17:47:25 crc kubenswrapper[4631]: I1204 17:47:25.420085 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-57k7z" event={"ID":"fdf92431-a279-4eb5-8e5d-56e353febcf2","Type":"ContainerStarted","Data":"7f8acc932a651a477052a2508149d2b60aa0ecaadd79ed8b33a971042dc2e79c"} Dec 04 17:47:25 crc kubenswrapper[4631]: I1204 17:47:25.436359 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq" event={"ID":"e20ca639-4732-4b27-b2e2-8d4cc9374515","Type":"ContainerStarted","Data":"2d94f7c61729545202f3d1202d51e364785e57bf735d53917d4bc2f003dcd2cc"} Dec 04 17:47:25 crc kubenswrapper[4631]: I1204 17:47:25.450228 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-67cns" event={"ID":"30be0340-cc50-4244-9b27-7e41f86bf113","Type":"ContainerStarted","Data":"d933e365563964fc7076857edb79aef0ebc237d5e36778a4e8201290af0ae414"} Dec 04 17:47:25 crc kubenswrapper[4631]: I1204 17:47:25.468871 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-ldclc" event={"ID":"32886d79-72a7-4318-8098-718f0f55f61e","Type":"ContainerStarted","Data":"b203687fc1be8a68f8191521d238075a25f77b32e43394794e26f491b753b299"} Dec 04 17:47:26 crc kubenswrapper[4631]: I1204 17:47:26.481208 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ct9z5" event={"ID":"8c4e7e86-5efa-4888-a717-2dcafc489144","Type":"ContainerStarted","Data":"834e360c66830acb7ad72cae8d815af37f20ba01bacda37e630e7ebcdee19b69"} Dec 04 17:47:26 crc kubenswrapper[4631]: I1204 17:47:26.485461 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/octavia-operator-controller-manager-998648c74-66x65" event={"ID":"becd7035-989e-497f-96ad-7eaa0d7e4456","Type":"ContainerStarted","Data":"3bbaf8350bd750ca35c0a755b810d3329ff9879a0be67b2ae8b89ea34e80632c"} Dec 04 17:47:26 crc kubenswrapper[4631]: I1204 17:47:26.487306 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-grlw8" event={"ID":"acce4f1e-311d-44da-aaf9-a2cddc75be35","Type":"ContainerStarted","Data":"f11719010557fb06576368d4482e78c9ad0e2e728eded61db6974c4fc6458d1a"} Dec 04 17:47:26 crc kubenswrapper[4631]: I1204 17:47:26.524280 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ct9z5" podStartSLOduration=5.170735094 podStartE2EDuration="40.524263596s" podCreationTimestamp="2025-12-04 17:46:46 +0000 UTC" firstStartedPulling="2025-12-04 17:46:48.580338046 +0000 UTC m=+1138.612580034" lastFinishedPulling="2025-12-04 17:47:23.933866538 +0000 UTC m=+1173.966108536" observedRunningTime="2025-12-04 17:47:26.499761682 +0000 UTC m=+1176.532003680" watchObservedRunningTime="2025-12-04 17:47:26.524263596 +0000 UTC m=+1176.556505594" Dec 04 17:47:27 crc kubenswrapper[4631]: I1204 17:47:27.495282 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh" event={"ID":"8eceb916-5479-43f0-a3f4-75d0643adcab","Type":"ContainerStarted","Data":"79be44a9bd7eb277f407a4bacc3b3a61cbb9d3364abd020c445ab3fb064acff2"} Dec 04 17:47:27 crc kubenswrapper[4631]: I1204 17:47:27.495655 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh" Dec 04 17:47:27 crc kubenswrapper[4631]: I1204 17:47:27.539713 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh" podStartSLOduration=41.539697892 podStartE2EDuration="41.539697892s" podCreationTimestamp="2025-12-04 17:46:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:47:27.53298733 +0000 UTC m=+1177.565229338" watchObservedRunningTime="2025-12-04 17:47:27.539697892 +0000 UTC m=+1177.571939890" Dec 04 17:47:29 crc kubenswrapper[4631]: E1204 17:47:29.848078 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/test-operator-controller-manager-5854674fcc-r28bs" podUID="a19a11a8-a149-4b75-ab68-359723dcfbcb" Dec 04 17:47:29 crc kubenswrapper[4631]: E1204 17:47:29.848847 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jwf42" podUID="1d24a40a-06b2-43e4-9921-05dd2e8f27ea" Dec 04 17:47:30 crc kubenswrapper[4631]: E1204 17:47:30.377963 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/placement-operator-controller-manager-78f8948974-df7fm" podUID="7e9e43c6-516b-4195-9d65-e6e80544bb7d" 
Dec 04 17:47:30 crc kubenswrapper[4631]: E1204 17:47:30.459909 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xc5m4" podUID="1c44bc20-c171-4476-a959-9e31d9bbac58" Dec 04 17:47:30 crc kubenswrapper[4631]: E1204 17:47:30.506087 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-cz8hk" podUID="5a715ee8-c048-4447-b3fc-5f94121c0e7e" Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.539593 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2hbp2" event={"ID":"41a5b9af-e0eb-46d8-84f0-0962dd72367c","Type":"ContainerStarted","Data":"bd90c5bc66d3495eb035c0692c7ad9f19c448f2d1fc34ca8104ddf01d27cbfeb"} Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.539905 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2hbp2" Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.544000 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2hbp2" Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.545434 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-cz8hk" event={"ID":"5a715ee8-c048-4447-b3fc-5f94121c0e7e","Type":"ContainerStarted","Data":"a2832b0cdeddb85687a25f6c542787d946f6890a962fdfdc8a47afb8a1189b5a"} Dec 04 17:47:30 crc kubenswrapper[4631]: E1204 17:47:30.553317 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sw9bk" podUID="72f35a0d-fa67-44c7-a25c-b720885d5708" Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.563240 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-f5lq4" event={"ID":"f05ded75-e10d-41ed-921d-0ba118f3453d","Type":"ContainerStarted","Data":"8e822b41df3fb3550f4ceaaca61a4a4306f4bf15be4fac76473a14dcf081e9b6"} Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.564164 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-f5lq4" Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.567480 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-f5lq4" Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.579047 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq" event={"ID":"e20ca639-4732-4b27-b2e2-8d4cc9374515","Type":"ContainerStarted","Data":"cd3cba77b1fe9e4d49373f7662c9db90f1a69de31f0f7cf7de50d61ec82cccb7"} Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.587518 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2hbp2" podStartSLOduration=3.121224049 podStartE2EDuration="44.587494074s" podCreationTimestamp="2025-12-04 17:46:46 +0000 UTC" firstStartedPulling="2025-12-04 17:46:48.097516845 +0000 UTC m=+1138.129758843" lastFinishedPulling="2025-12-04 17:47:29.56378687 +0000 UTC m=+1179.596028868" observedRunningTime="2025-12-04 17:47:30.569552699 +0000 UTC m=+1180.601794697" watchObservedRunningTime="2025-12-04 17:47:30.587494074 +0000 UTC m=+1180.619736072" Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.601720 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-f5lq4" podStartSLOduration=3.634015291 podStartE2EDuration="44.601700312s" podCreationTimestamp="2025-12-04 17:46:46 +0000 UTC" firstStartedPulling="2025-12-04 17:46:48.580507301 +0000 UTC m=+1138.612749299" lastFinishedPulling="2025-12-04 17:47:29.548192322 +0000 UTC m=+1179.580434320" observedRunningTime="2025-12-04 17:47:30.596709119 +0000 UTC m=+1180.628951127" watchObservedRunningTime="2025-12-04 17:47:30.601700312 +0000 UTC m=+1180.633942310" Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.613457 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-bqs5m" event={"ID":"7cf50b74-b958-4f66-aefc-2ad897abdec2","Type":"ContainerStarted","Data":"5ae69cc3459b565d35f6bb4bb4c89e92350e69bdc68c68f38b0d9a64cfd46af7"} Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.614556 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-bqs5m" Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.616037 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-9s4jp" event={"ID":"709a39e5-9fe0-4861-8761-774f26a4a315","Type":"ContainerStarted","Data":"dd550f4d6ceaff47c288de6c9b50a4bebadc4bc27555b937054fa541d4c8cef6"} Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.616543 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-9s4jp" Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.617729 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jwf42" event={"ID":"1d24a40a-06b2-43e4-9921-05dd2e8f27ea","Type":"ContainerStarted","Data":"1d03307ade60fab0ed17ddef09dbd81be311a138df041c5bd8a030c299853683"} Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.635106 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-9s4jp" Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.636915 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-r28bs" event={"ID":"a19a11a8-a149-4b75-ab68-359723dcfbcb","Type":"ContainerStarted","Data":"399f69d3ceb1a03449fdc88bde991357bdd2f31c5cc23de01d51a90819f23e6c"} Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.648306 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-2bzwd" event={"ID":"b8212ac4-255e-4de2-ac13-0033682d7550","Type":"ContainerStarted","Data":"0a9f10cf41952eb1260eb7f1b0c309f9641803266272bb659a4a8b29246b2ce2"} 
Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.649271 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-2bzwd" Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.660217 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xc5m4" event={"ID":"1c44bc20-c171-4476-a959-9e31d9bbac58","Type":"ContainerStarted","Data":"502c331caef1422cb60c481db4376fa1efb33dc702535dd2541b09e53cad8a96"} Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.668494 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-57k7z" event={"ID":"fdf92431-a279-4eb5-8e5d-56e353febcf2","Type":"ContainerStarted","Data":"43d760b153550249c592a5fdc4c4f1bab841fcb9f7cc611f99a0424a5e11ebf7"} Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.685716 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-2bzwd" Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.688155 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-df7fm" event={"ID":"7e9e43c6-516b-4195-9d65-e6e80544bb7d","Type":"ContainerStarted","Data":"48ece88d0a6c12864546599b37b4beda1dc244cde70729e476fa7175a14324c2"} Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.701800 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-bqs5m" Dec 04 17:47:30 crc kubenswrapper[4631]: I1204 17:47:30.912290 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-9s4jp" podStartSLOduration=3.9713760259999997 podStartE2EDuration="45.912246069s" podCreationTimestamp="2025-12-04 17:46:45 +0000 UTC" firstStartedPulling="2025-12-04 17:46:47.595732547 +0000 UTC m=+1137.627974545" lastFinishedPulling="2025-12-04 17:47:29.53660258 +0000 UTC m=+1179.568844588" observedRunningTime="2025-12-04 17:47:30.838817501 +0000 UTC m=+1180.871059499" watchObservedRunningTime="2025-12-04 17:47:30.912246069 +0000 UTC m=+1180.944488067" Dec 04 17:47:31 crc kubenswrapper[4631]: I1204 17:47:31.076108 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-2bzwd" podStartSLOduration=3.839668287 podStartE2EDuration="45.076085543s" podCreationTimestamp="2025-12-04 17:46:46 +0000 UTC" firstStartedPulling="2025-12-04 17:46:48.214772491 +0000 UTC m=+1138.247014489" lastFinishedPulling="2025-12-04 17:47:29.451189747 +0000 UTC m=+1179.483431745" observedRunningTime="2025-12-04 17:47:31.015852794 +0000 UTC m=+1181.048094802" watchObservedRunningTime="2025-12-04 17:47:31.076085543 +0000 UTC m=+1181.108327551" Dec 04 17:47:31 crc kubenswrapper[4631]: I1204 17:47:31.137200 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-bqs5m" podStartSLOduration=4.2146472 podStartE2EDuration="45.137180338s" podCreationTimestamp="2025-12-04 17:46:46 +0000 UTC" firstStartedPulling="2025-12-04 17:46:48.528216226 +0000 UTC m=+1138.560458224" lastFinishedPulling="2025-12-04 17:47:29.450749344 +0000 UTC m=+1179.482991362" observedRunningTime="2025-12-04 17:47:31.136607621 
+0000 UTC m=+1181.168849639" watchObservedRunningTime="2025-12-04 17:47:31.137180338 +0000 UTC m=+1181.169422336" Dec 04 17:47:31 crc kubenswrapper[4631]: I1204 17:47:31.696060 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sw9bk" event={"ID":"72f35a0d-fa67-44c7-a25c-b720885d5708","Type":"ContainerStarted","Data":"5384c9ad38e311c55d6e2958fd8828427fc847bf9cc04737ddebce98b177d0bb"} Dec 04 17:47:31 crc kubenswrapper[4631]: I1204 17:47:31.697251 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-grlw8" event={"ID":"acce4f1e-311d-44da-aaf9-a2cddc75be35","Type":"ContainerStarted","Data":"9f3ff388473e936b05e833584d621f757a83c4b620a2da3e0dc76f800d85ef65"} Dec 04 17:47:31 crc kubenswrapper[4631]: I1204 17:47:31.697558 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-grlw8" Dec 04 17:47:31 crc kubenswrapper[4631]: I1204 17:47:31.698635 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-ldclc" event={"ID":"32886d79-72a7-4318-8098-718f0f55f61e","Type":"ContainerStarted","Data":"39e793baa04b9430c4f363f7cf5c81a5f818cbce8be0f5c8248f25f100451dbd"} Dec 04 17:47:31 crc kubenswrapper[4631]: I1204 17:47:31.699097 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-ldclc" Dec 04 17:47:31 crc kubenswrapper[4631]: I1204 17:47:31.699586 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-grlw8" Dec 04 17:47:31 crc kubenswrapper[4631]: I1204 17:47:31.701554 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-66x65" event={"ID":"becd7035-989e-497f-96ad-7eaa0d7e4456","Type":"ContainerStarted","Data":"a4be2a78d1de43b3b356beded4cc120ce0cc9ff269af058efeab62eeaf97f0c9"} Dec 04 17:47:31 crc kubenswrapper[4631]: I1204 17:47:31.701615 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-66x65" Dec 04 17:47:31 crc kubenswrapper[4631]: I1204 17:47:31.705187 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-66x65" Dec 04 17:47:31 crc kubenswrapper[4631]: I1204 17:47:31.715025 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-ldclc" Dec 04 17:47:31 crc kubenswrapper[4631]: I1204 17:47:31.751755 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-grlw8" podStartSLOduration=4.748065575 podStartE2EDuration="45.751729983s" podCreationTimestamp="2025-12-04 17:46:46 +0000 UTC" firstStartedPulling="2025-12-04 17:46:48.533545149 +0000 UTC m=+1138.565787147" lastFinishedPulling="2025-12-04 17:47:29.537209557 +0000 UTC m=+1179.569451555" observedRunningTime="2025-12-04 17:47:31.725859921 +0000 UTC m=+1181.758101919" watchObservedRunningTime="2025-12-04 17:47:31.751729983 +0000 UTC m=+1181.783971971" Dec 04 17:47:31 crc kubenswrapper[4631]: I1204 17:47:31.755612 4631 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-ldclc" podStartSLOduration=5.5487275579999995 podStartE2EDuration="46.755604115s" podCreationTimestamp="2025-12-04 17:46:45 +0000 UTC" firstStartedPulling="2025-12-04 17:46:48.353055692 +0000 UTC m=+1138.385297690" lastFinishedPulling="2025-12-04 17:47:29.559932249 +0000 UTC m=+1179.592174247" observedRunningTime="2025-12-04 17:47:31.748506491 +0000 UTC m=+1181.780748489" watchObservedRunningTime="2025-12-04 17:47:31.755604115 +0000 UTC m=+1181.787846113" Dec 04 17:47:31 crc kubenswrapper[4631]: I1204 17:47:31.809597 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-66x65" podStartSLOduration=4.83549123 podStartE2EDuration="45.809565234s" podCreationTimestamp="2025-12-04 17:46:46 +0000 UTC" firstStartedPulling="2025-12-04 17:46:48.580598164 +0000 UTC m=+1138.612840152" lastFinishedPulling="2025-12-04 17:47:29.554672158 +0000 UTC m=+1179.586914156" observedRunningTime="2025-12-04 17:47:31.777417661 +0000 UTC m=+1181.809659679" watchObservedRunningTime="2025-12-04 17:47:31.809565234 +0000 UTC m=+1181.841807242" Dec 04 17:47:32 crc kubenswrapper[4631]: E1204 17:47:32.035617 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-czzvh" podUID="22b6958b-a18a-49c1-b6a4-28b3ebad0846" Dec 04 17:47:32 crc kubenswrapper[4631]: E1204 17:47:32.051332 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-tmr9d" podUID="76c2990b-dff1-4715-8517-28cff884cf12" Dec 04 17:47:32 crc kubenswrapper[4631]: E1204 17:47:32.051478 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xdm6x" podUID="f0089345-8234-4ea7-9fbe-528afe9d5fc0" Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.709922 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xc5m4" event={"ID":"1c44bc20-c171-4476-a959-9e31d9bbac58","Type":"ContainerStarted","Data":"0e0bc4af3bb6b7686e4093d03af65e827f40e7f0ff62cb9417f7151ca3a1b1a3"} Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.710222 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xc5m4" Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.714313 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-tmr9d" event={"ID":"76c2990b-dff1-4715-8517-28cff884cf12","Type":"ContainerStarted","Data":"2b322aacb98080d87cde1038a640ef1ae64ca55a12853e7ba29bbd1c98bc5b4e"} Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.717784 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-czzvh" 
event={"ID":"22b6958b-a18a-49c1-b6a4-28b3ebad0846","Type":"ContainerStarted","Data":"08c2adce7a5674cbeccdb2d966732633d76eb97ef4bcb6a59b9174c1aea380ab"} Dec 04 17:47:32 crc kubenswrapper[4631]: E1204 17:47:32.718751 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-czzvh" podUID="22b6958b-a18a-49c1-b6a4-28b3ebad0846" Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.723790 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-57k7z" event={"ID":"fdf92431-a279-4eb5-8e5d-56e353febcf2","Type":"ContainerStarted","Data":"72e050fd540dc850797a0cf600c6a753c73e0bf0c6519978017f99981624006d"} Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.724223 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-57k7z" Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.725695 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq" event={"ID":"e20ca639-4732-4b27-b2e2-8d4cc9374515","Type":"ContainerStarted","Data":"64b397001607f884c6b90c2650bcea039402a7896fcdec7c1c91c2058bac3121"} Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.726076 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq" Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.727548 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-cz8hk" event={"ID":"5a715ee8-c048-4447-b3fc-5f94121c0e7e","Type":"ContainerStarted","Data":"bedd01cf7499c8fd1226e7221268c78fe002ccbcf9a86c6300a73ce135011389"} Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.727914 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-cz8hk" Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.736399 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-r28bs" event={"ID":"a19a11a8-a149-4b75-ab68-359723dcfbcb","Type":"ContainerStarted","Data":"bc9099599b1f1b8c031f4fa5f2535ef5dac738b389e14c2e84a768a7fa11309d"} Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.737054 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-r28bs" Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.741016 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xdm6x" event={"ID":"f0089345-8234-4ea7-9fbe-528afe9d5fc0","Type":"ContainerStarted","Data":"5b0f100b22b6ec4cc6814deb2fcf31ed24632b102b6cfa2c436a4357bf755705"} Dec 04 17:47:32 crc kubenswrapper[4631]: E1204 17:47:32.742168 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xdm6x" podUID="f0089345-8234-4ea7-9fbe-528afe9d5fc0" Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.748245 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sw9bk" event={"ID":"72f35a0d-fa67-44c7-a25c-b720885d5708","Type":"ContainerStarted","Data":"1c2fc9d7cc36fdc4e8935e7d6bbdc7e86d77df11afdc4ae1a048cd6e5f96f52a"} Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.748601 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sw9bk" Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.756809 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-67cns" event={"ID":"30be0340-cc50-4244-9b27-7e41f86bf113","Type":"ContainerStarted","Data":"2d2264c0d15398f636a41332d509951b733361d5f37b1ecbcfa3f112a1379aaa"} Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.758168 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-67cns" Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.759414 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jwf42" event={"ID":"1d24a40a-06b2-43e4-9921-05dd2e8f27ea","Type":"ContainerStarted","Data":"b294e37132e962543470c80b44329b65b392e86ce60493f9bcb5dd5029d8ce8d"} Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.759924 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jwf42" Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.760163 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-67cns" Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.762596 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-p6vp2" event={"ID":"dd2fd0ee-2bee-4cd2-9c24-a0c0dce37b46","Type":"ContainerStarted","Data":"e1ba444975822c3684808d38c3ab68df33af572bdf0bef16b1675151cbd1ff04"} Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.762633 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-p6vp2" Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.766845 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-p6vp2" Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.784030 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xc5m4" podStartSLOduration=2.921974221 podStartE2EDuration="46.784010013s" podCreationTimestamp="2025-12-04 17:46:46 +0000 UTC" firstStartedPulling="2025-12-04 17:46:48.518572568 +0000 UTC m=+1138.550814566" lastFinishedPulling="2025-12-04 17:47:32.38060837 +0000 UTC m=+1182.412850358" observedRunningTime="2025-12-04 17:47:32.777345631 +0000 UTC m=+1182.809587629" watchObservedRunningTime="2025-12-04 17:47:32.784010013 
+0000 UTC m=+1182.816252011" Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.830304 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-r28bs" podStartSLOduration=3.044334869 podStartE2EDuration="46.830280561s" podCreationTimestamp="2025-12-04 17:46:46 +0000 UTC" firstStartedPulling="2025-12-04 17:46:48.445317929 +0000 UTC m=+1138.477559927" lastFinishedPulling="2025-12-04 17:47:32.231263621 +0000 UTC m=+1182.263505619" observedRunningTime="2025-12-04 17:47:32.82049998 +0000 UTC m=+1182.852741978" watchObservedRunningTime="2025-12-04 17:47:32.830280561 +0000 UTC m=+1182.862522559" Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.859662 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-57k7z" podStartSLOduration=42.042963483 podStartE2EDuration="46.859649334s" podCreationTimestamp="2025-12-04 17:46:46 +0000 UTC" firstStartedPulling="2025-12-04 17:47:24.538082667 +0000 UTC m=+1174.570324665" lastFinishedPulling="2025-12-04 17:47:29.354768508 +0000 UTC m=+1179.387010516" observedRunningTime="2025-12-04 17:47:32.856085362 +0000 UTC m=+1182.888327370" watchObservedRunningTime="2025-12-04 17:47:32.859649334 +0000 UTC m=+1182.891891332" Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.919739 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-cz8hk" podStartSLOduration=2.7569696439999998 podStartE2EDuration="46.919721799s" podCreationTimestamp="2025-12-04 17:46:46 +0000 UTC" firstStartedPulling="2025-12-04 17:46:47.902691775 +0000 UTC m=+1137.934933773" lastFinishedPulling="2025-12-04 17:47:32.06544393 +0000 UTC m=+1182.097685928" observedRunningTime="2025-12-04 17:47:32.883943972 +0000 UTC m=+1182.916185970" watchObservedRunningTime="2025-12-04 17:47:32.919721799 +0000 UTC m=+1182.951963797" Dec 04 17:47:32 crc kubenswrapper[4631]: I1204 17:47:32.993709 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-67cns" podStartSLOduration=5.434131193 podStartE2EDuration="47.993689343s" podCreationTimestamp="2025-12-04 17:46:45 +0000 UTC" firstStartedPulling="2025-12-04 17:46:47.056872502 +0000 UTC m=+1137.089114490" lastFinishedPulling="2025-12-04 17:47:29.616430632 +0000 UTC m=+1179.648672640" observedRunningTime="2025-12-04 17:47:32.966033929 +0000 UTC m=+1182.998275937" watchObservedRunningTime="2025-12-04 17:47:32.993689343 +0000 UTC m=+1183.025931341" Dec 04 17:47:33 crc kubenswrapper[4631]: I1204 17:47:33.046782 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq" podStartSLOduration=42.16474142 podStartE2EDuration="47.046760067s" podCreationTimestamp="2025-12-04 17:46:46 +0000 UTC" firstStartedPulling="2025-12-04 17:47:24.556610239 +0000 UTC m=+1174.588852227" lastFinishedPulling="2025-12-04 17:47:29.438628866 +0000 UTC m=+1179.470870874" observedRunningTime="2025-12-04 17:47:33.032324113 +0000 UTC m=+1183.064566111" watchObservedRunningTime="2025-12-04 17:47:33.046760067 +0000 UTC m=+1183.079002065" Dec 04 17:47:33 crc kubenswrapper[4631]: I1204 17:47:33.093193 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-p6vp2" podStartSLOduration=5.970873269 podStartE2EDuration="48.09317258s" podCreationTimestamp="2025-12-04 17:46:45 +0000 UTC" firstStartedPulling="2025-12-04 17:46:47.474568579 +0000 UTC m=+1137.506810577" lastFinishedPulling="2025-12-04 17:47:29.59686788 +0000 UTC m=+1179.629109888" observedRunningTime="2025-12-04 17:47:33.083669417 +0000 UTC m=+1183.115911415" watchObservedRunningTime="2025-12-04 17:47:33.09317258 +0000 UTC m=+1183.125414578" Dec 04 17:47:33 crc kubenswrapper[4631]: I1204 17:47:33.134154 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sw9bk" podStartSLOduration=3.5312128229999997 podStartE2EDuration="47.134134706s" podCreationTimestamp="2025-12-04 17:46:46 +0000 UTC" firstStartedPulling="2025-12-04 17:46:48.496138902 +0000 UTC m=+1138.528380900" lastFinishedPulling="2025-12-04 17:47:32.099060785 +0000 UTC m=+1182.131302783" observedRunningTime="2025-12-04 17:47:33.124957392 +0000 UTC m=+1183.157199400" watchObservedRunningTime="2025-12-04 17:47:33.134134706 +0000 UTC m=+1183.166376704" Dec 04 17:47:33 crc kubenswrapper[4631]: I1204 17:47:33.418040 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-f65bcfbd6-zphvh" Dec 04 17:47:33 crc kubenswrapper[4631]: I1204 17:47:33.454591 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jwf42" podStartSLOduration=2.902627056 podStartE2EDuration="47.454575917s" podCreationTimestamp="2025-12-04 17:46:46 +0000 UTC" firstStartedPulling="2025-12-04 17:46:47.897062163 +0000 UTC m=+1137.929304161" lastFinishedPulling="2025-12-04 17:47:32.449011024 +0000 UTC m=+1182.481253022" observedRunningTime="2025-12-04 17:47:33.153133861 +0000 UTC m=+1183.185375859" watchObservedRunningTime="2025-12-04 17:47:33.454575917 +0000 UTC m=+1183.486817915" Dec 04 17:47:33 crc kubenswrapper[4631]: I1204 17:47:33.770133 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-tmr9d" event={"ID":"76c2990b-dff1-4715-8517-28cff884cf12","Type":"ContainerStarted","Data":"39585390bb376874f493e994c26b0c60463d52df019a593efc173d2c75be75a2"} Dec 04 17:47:33 crc kubenswrapper[4631]: I1204 17:47:33.770283 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-tmr9d" Dec 04 17:47:33 crc kubenswrapper[4631]: I1204 17:47:33.771843 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-df7fm" event={"ID":"7e9e43c6-516b-4195-9d65-e6e80544bb7d","Type":"ContainerStarted","Data":"45c8e82162f081e2f9c2ac36d695bec194e2c3cf4aba2c9e7d915408834653e5"} Dec 04 17:47:33 crc kubenswrapper[4631]: I1204 17:47:33.792256 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-tmr9d" podStartSLOduration=3.040327599 podStartE2EDuration="47.792238422s" podCreationTimestamp="2025-12-04 17:46:46 +0000 UTC" firstStartedPulling="2025-12-04 17:46:48.38875568 +0000 UTC m=+1138.420997678" lastFinishedPulling="2025-12-04 17:47:33.140666503 +0000 UTC m=+1183.172908501" observedRunningTime="2025-12-04 17:47:33.788228467 +0000 UTC m=+1183.820470465" 
watchObservedRunningTime="2025-12-04 17:47:33.792238422 +0000 UTC m=+1183.824480420" Dec 04 17:47:33 crc kubenswrapper[4631]: I1204 17:47:33.809047 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-df7fm" podStartSLOduration=3.686458095 podStartE2EDuration="47.809026914s" podCreationTimestamp="2025-12-04 17:46:46 +0000 UTC" firstStartedPulling="2025-12-04 17:46:48.445656278 +0000 UTC m=+1138.477898276" lastFinishedPulling="2025-12-04 17:47:32.568225097 +0000 UTC m=+1182.600467095" observedRunningTime="2025-12-04 17:47:33.805101842 +0000 UTC m=+1183.837343850" watchObservedRunningTime="2025-12-04 17:47:33.809026914 +0000 UTC m=+1183.841268912" Dec 04 17:47:34 crc kubenswrapper[4631]: I1204 17:47:34.783415 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xdm6x" event={"ID":"f0089345-8234-4ea7-9fbe-528afe9d5fc0","Type":"ContainerStarted","Data":"b679853f8a5457b97274ed9effa84873c2174f05e3237ac70fb8498991d54279"} Dec 04 17:47:34 crc kubenswrapper[4631]: I1204 17:47:34.784238 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xdm6x" Dec 04 17:47:34 crc kubenswrapper[4631]: I1204 17:47:34.788199 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-czzvh" event={"ID":"22b6958b-a18a-49c1-b6a4-28b3ebad0846","Type":"ContainerStarted","Data":"62da451a83f82630cfca5bc7a861b6f752c1082dba3fd2e362966fe30686c9f6"} Dec 04 17:47:34 crc kubenswrapper[4631]: I1204 17:47:34.790266 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-df7fm" Dec 04 17:47:34 crc kubenswrapper[4631]: I1204 17:47:34.803608 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq" Dec 04 17:47:34 crc kubenswrapper[4631]: I1204 17:47:34.808604 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-57k7z" Dec 04 17:47:34 crc kubenswrapper[4631]: I1204 17:47:34.844806 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xdm6x" podStartSLOduration=2.575591567 podStartE2EDuration="48.844785975s" podCreationTimestamp="2025-12-04 17:46:46 +0000 UTC" firstStartedPulling="2025-12-04 17:46:47.906449153 +0000 UTC m=+1137.938691151" lastFinishedPulling="2025-12-04 17:47:34.175643561 +0000 UTC m=+1184.207885559" observedRunningTime="2025-12-04 17:47:34.80876268 +0000 UTC m=+1184.841004698" watchObservedRunningTime="2025-12-04 17:47:34.844785975 +0000 UTC m=+1184.877028013" Dec 04 17:47:34 crc kubenswrapper[4631]: I1204 17:47:34.903791 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-czzvh" podStartSLOduration=3.19339679 podStartE2EDuration="48.903767098s" podCreationTimestamp="2025-12-04 17:46:46 +0000 UTC" firstStartedPulling="2025-12-04 17:46:48.463881653 +0000 UTC m=+1138.496123651" lastFinishedPulling="2025-12-04 17:47:34.174251961 +0000 UTC m=+1184.206493959" observedRunningTime="2025-12-04 17:47:34.899855256 +0000 UTC m=+1184.932097254" 
watchObservedRunningTime="2025-12-04 17:47:34.903767098 +0000 UTC m=+1184.936009096" Dec 04 17:47:36 crc kubenswrapper[4631]: I1204 17:47:36.022721 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 17:47:36 crc kubenswrapper[4631]: I1204 17:47:36.022800 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 17:47:36 crc kubenswrapper[4631]: I1204 17:47:36.023132 4631 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 17:47:36 crc kubenswrapper[4631]: I1204 17:47:36.023859 4631 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2b743632dd2ae2acbde167ee221ee6a9a0928b1076cfc48d3d7e9758476527f1"} pod="openshift-machine-config-operator/machine-config-daemon-q27wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 17:47:36 crc kubenswrapper[4631]: I1204 17:47:36.023959 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" containerID="cri-o://2b743632dd2ae2acbde167ee221ee6a9a0928b1076cfc48d3d7e9758476527f1" gracePeriod=600 Dec 04 17:47:36 crc kubenswrapper[4631]: I1204 17:47:36.758580 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-czzvh" Dec 04 17:47:36 crc kubenswrapper[4631]: I1204 17:47:36.801655 4631 generic.go:334] "Generic (PLEG): container finished" podID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerID="2b743632dd2ae2acbde167ee221ee6a9a0928b1076cfc48d3d7e9758476527f1" exitCode=0 Dec 04 17:47:36 crc kubenswrapper[4631]: I1204 17:47:36.801707 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerDied","Data":"2b743632dd2ae2acbde167ee221ee6a9a0928b1076cfc48d3d7e9758476527f1"} Dec 04 17:47:36 crc kubenswrapper[4631]: I1204 17:47:36.801745 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerStarted","Data":"748b1412c888c95d08adbf71c3c971aef4060a8000682c7031f16f9f8ee657ac"} Dec 04 17:47:36 crc kubenswrapper[4631]: I1204 17:47:36.801762 4631 scope.go:117] "RemoveContainer" containerID="34035c448316dbd54a5149bba9f1bb4ce2bc406518cc3e31ea4f3aeb08daaf2b" Dec 04 17:47:37 crc kubenswrapper[4631]: I1204 17:47:37.019770 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-df7fm" Dec 04 17:47:37 crc kubenswrapper[4631]: I1204 17:47:37.282782 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/test-operator-controller-manager-5854674fcc-r28bs" Dec 04 17:47:46 crc kubenswrapper[4631]: I1204 17:47:46.459488 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jwf42" Dec 04 17:47:46 crc kubenswrapper[4631]: I1204 17:47:46.503869 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-cz8hk" Dec 04 17:47:46 crc kubenswrapper[4631]: I1204 17:47:46.696207 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xdm6x" Dec 04 17:47:46 crc kubenswrapper[4631]: I1204 17:47:46.696634 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sw9bk" Dec 04 17:47:46 crc kubenswrapper[4631]: I1204 17:47:46.719048 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-tmr9d" Dec 04 17:47:46 crc kubenswrapper[4631]: I1204 17:47:46.764092 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-czzvh" Dec 04 17:47:46 crc kubenswrapper[4631]: I1204 17:47:46.856326 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xc5m4" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.165923 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4xx6b"] Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.168966 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-4xx6b" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.173526 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.174387 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.174522 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.174652 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-bv8z4" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.180811 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4xx6b"] Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.229057 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-lbfcv"] Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.230226 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-lbfcv" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.237533 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.317477 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-lbfcv"] Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.340012 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ba62723-e502-40e0-bca9-57f552d5a715-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-lbfcv\" (UID: \"7ba62723-e502-40e0-bca9-57f552d5a715\") " pod="openstack/dnsmasq-dns-78dd6ddcc-lbfcv" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.340109 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btgr8\" (UniqueName: \"kubernetes.io/projected/7ba62723-e502-40e0-bca9-57f552d5a715-kube-api-access-btgr8\") pod \"dnsmasq-dns-78dd6ddcc-lbfcv\" (UID: \"7ba62723-e502-40e0-bca9-57f552d5a715\") " pod="openstack/dnsmasq-dns-78dd6ddcc-lbfcv" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.340150 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rgqg\" (UniqueName: \"kubernetes.io/projected/b927f9cb-62ad-4b2d-99a6-4309bccc7ecf-kube-api-access-4rgqg\") pod \"dnsmasq-dns-675f4bcbfc-4xx6b\" (UID: \"b927f9cb-62ad-4b2d-99a6-4309bccc7ecf\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4xx6b" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.340169 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b927f9cb-62ad-4b2d-99a6-4309bccc7ecf-config\") pod \"dnsmasq-dns-675f4bcbfc-4xx6b\" (UID: \"b927f9cb-62ad-4b2d-99a6-4309bccc7ecf\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4xx6b" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.340216 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ba62723-e502-40e0-bca9-57f552d5a715-config\") pod \"dnsmasq-dns-78dd6ddcc-lbfcv\" (UID: \"7ba62723-e502-40e0-bca9-57f552d5a715\") " pod="openstack/dnsmasq-dns-78dd6ddcc-lbfcv" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.441639 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ba62723-e502-40e0-bca9-57f552d5a715-config\") pod \"dnsmasq-dns-78dd6ddcc-lbfcv\" (UID: \"7ba62723-e502-40e0-bca9-57f552d5a715\") " pod="openstack/dnsmasq-dns-78dd6ddcc-lbfcv" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.441719 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ba62723-e502-40e0-bca9-57f552d5a715-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-lbfcv\" (UID: \"7ba62723-e502-40e0-bca9-57f552d5a715\") " pod="openstack/dnsmasq-dns-78dd6ddcc-lbfcv" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.441770 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btgr8\" (UniqueName: \"kubernetes.io/projected/7ba62723-e502-40e0-bca9-57f552d5a715-kube-api-access-btgr8\") pod \"dnsmasq-dns-78dd6ddcc-lbfcv\" (UID: \"7ba62723-e502-40e0-bca9-57f552d5a715\") " pod="openstack/dnsmasq-dns-78dd6ddcc-lbfcv" Dec 04 
17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.441813 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rgqg\" (UniqueName: \"kubernetes.io/projected/b927f9cb-62ad-4b2d-99a6-4309bccc7ecf-kube-api-access-4rgqg\") pod \"dnsmasq-dns-675f4bcbfc-4xx6b\" (UID: \"b927f9cb-62ad-4b2d-99a6-4309bccc7ecf\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4xx6b" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.441835 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b927f9cb-62ad-4b2d-99a6-4309bccc7ecf-config\") pod \"dnsmasq-dns-675f4bcbfc-4xx6b\" (UID: \"b927f9cb-62ad-4b2d-99a6-4309bccc7ecf\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4xx6b" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.442839 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ba62723-e502-40e0-bca9-57f552d5a715-config\") pod \"dnsmasq-dns-78dd6ddcc-lbfcv\" (UID: \"7ba62723-e502-40e0-bca9-57f552d5a715\") " pod="openstack/dnsmasq-dns-78dd6ddcc-lbfcv" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.442842 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b927f9cb-62ad-4b2d-99a6-4309bccc7ecf-config\") pod \"dnsmasq-dns-675f4bcbfc-4xx6b\" (UID: \"b927f9cb-62ad-4b2d-99a6-4309bccc7ecf\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4xx6b" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.442994 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ba62723-e502-40e0-bca9-57f552d5a715-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-lbfcv\" (UID: \"7ba62723-e502-40e0-bca9-57f552d5a715\") " pod="openstack/dnsmasq-dns-78dd6ddcc-lbfcv" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.464560 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btgr8\" (UniqueName: \"kubernetes.io/projected/7ba62723-e502-40e0-bca9-57f552d5a715-kube-api-access-btgr8\") pod \"dnsmasq-dns-78dd6ddcc-lbfcv\" (UID: \"7ba62723-e502-40e0-bca9-57f552d5a715\") " pod="openstack/dnsmasq-dns-78dd6ddcc-lbfcv" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.464646 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rgqg\" (UniqueName: \"kubernetes.io/projected/b927f9cb-62ad-4b2d-99a6-4309bccc7ecf-kube-api-access-4rgqg\") pod \"dnsmasq-dns-675f4bcbfc-4xx6b\" (UID: \"b927f9cb-62ad-4b2d-99a6-4309bccc7ecf\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4xx6b" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.486640 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-4xx6b" Dec 04 17:48:05 crc kubenswrapper[4631]: I1204 17:48:05.549324 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-lbfcv" Dec 04 17:48:06 crc kubenswrapper[4631]: I1204 17:48:06.021833 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4xx6b"] Dec 04 17:48:06 crc kubenswrapper[4631]: I1204 17:48:06.028394 4631 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 17:48:06 crc kubenswrapper[4631]: I1204 17:48:06.067321 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-4xx6b" event={"ID":"b927f9cb-62ad-4b2d-99a6-4309bccc7ecf","Type":"ContainerStarted","Data":"b9a28874166a139df410cb619fdd7b6571eb2cebb8fe3f4a8d2c80c566578edc"} Dec 04 17:48:06 crc kubenswrapper[4631]: I1204 17:48:06.078992 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-lbfcv"] Dec 04 17:48:06 crc kubenswrapper[4631]: W1204 17:48:06.081153 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ba62723_e502_40e0_bca9_57f552d5a715.slice/crio-629a4cd4940bfc4d0fec624b4b6500b7352c782fb777e87c61dd0df1ced35833 WatchSource:0}: Error finding container 629a4cd4940bfc4d0fec624b4b6500b7352c782fb777e87c61dd0df1ced35833: Status 404 returned error can't find the container with id 629a4cd4940bfc4d0fec624b4b6500b7352c782fb777e87c61dd0df1ced35833 Dec 04 17:48:07 crc kubenswrapper[4631]: I1204 17:48:07.079062 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-lbfcv" event={"ID":"7ba62723-e502-40e0-bca9-57f552d5a715","Type":"ContainerStarted","Data":"629a4cd4940bfc4d0fec624b4b6500b7352c782fb777e87c61dd0df1ced35833"} Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.052382 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4xx6b"] Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.088334 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-hjd8c"] Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.092595 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-hjd8c" Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.094409 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3425ef2-08d1-4d79-b36e-03852e0e0750-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-hjd8c\" (UID: \"a3425ef2-08d1-4d79-b36e-03852e0e0750\") " pod="openstack/dnsmasq-dns-5ccc8479f9-hjd8c" Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.094483 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3425ef2-08d1-4d79-b36e-03852e0e0750-config\") pod \"dnsmasq-dns-5ccc8479f9-hjd8c\" (UID: \"a3425ef2-08d1-4d79-b36e-03852e0e0750\") " pod="openstack/dnsmasq-dns-5ccc8479f9-hjd8c" Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.094529 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spfvb\" (UniqueName: \"kubernetes.io/projected/a3425ef2-08d1-4d79-b36e-03852e0e0750-kube-api-access-spfvb\") pod \"dnsmasq-dns-5ccc8479f9-hjd8c\" (UID: \"a3425ef2-08d1-4d79-b36e-03852e0e0750\") " pod="openstack/dnsmasq-dns-5ccc8479f9-hjd8c" Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.118064 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-hjd8c"] Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.198324 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spfvb\" (UniqueName: \"kubernetes.io/projected/a3425ef2-08d1-4d79-b36e-03852e0e0750-kube-api-access-spfvb\") pod \"dnsmasq-dns-5ccc8479f9-hjd8c\" (UID: \"a3425ef2-08d1-4d79-b36e-03852e0e0750\") " pod="openstack/dnsmasq-dns-5ccc8479f9-hjd8c" Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.198407 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3425ef2-08d1-4d79-b36e-03852e0e0750-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-hjd8c\" (UID: \"a3425ef2-08d1-4d79-b36e-03852e0e0750\") " pod="openstack/dnsmasq-dns-5ccc8479f9-hjd8c" Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.198474 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3425ef2-08d1-4d79-b36e-03852e0e0750-config\") pod \"dnsmasq-dns-5ccc8479f9-hjd8c\" (UID: \"a3425ef2-08d1-4d79-b36e-03852e0e0750\") " pod="openstack/dnsmasq-dns-5ccc8479f9-hjd8c" Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.199549 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3425ef2-08d1-4d79-b36e-03852e0e0750-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-hjd8c\" (UID: \"a3425ef2-08d1-4d79-b36e-03852e0e0750\") " pod="openstack/dnsmasq-dns-5ccc8479f9-hjd8c" Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.199569 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3425ef2-08d1-4d79-b36e-03852e0e0750-config\") pod \"dnsmasq-dns-5ccc8479f9-hjd8c\" (UID: \"a3425ef2-08d1-4d79-b36e-03852e0e0750\") " pod="openstack/dnsmasq-dns-5ccc8479f9-hjd8c" Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.238228 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spfvb\" (UniqueName: 
\"kubernetes.io/projected/a3425ef2-08d1-4d79-b36e-03852e0e0750-kube-api-access-spfvb\") pod \"dnsmasq-dns-5ccc8479f9-hjd8c\" (UID: \"a3425ef2-08d1-4d79-b36e-03852e0e0750\") " pod="openstack/dnsmasq-dns-5ccc8479f9-hjd8c" Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.398617 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-lbfcv"] Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.413854 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-hjd8c" Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.433091 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-nw8pm"] Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.434602 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-nw8pm" Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.442880 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-nw8pm"] Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.604115 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/57d7d18e-e52d-417d-b62d-113550d69b79-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-nw8pm\" (UID: \"57d7d18e-e52d-417d-b62d-113550d69b79\") " pod="openstack/dnsmasq-dns-57d769cc4f-nw8pm" Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.604203 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57d7d18e-e52d-417d-b62d-113550d69b79-config\") pod \"dnsmasq-dns-57d769cc4f-nw8pm\" (UID: \"57d7d18e-e52d-417d-b62d-113550d69b79\") " pod="openstack/dnsmasq-dns-57d769cc4f-nw8pm" Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.604267 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tqm6\" (UniqueName: \"kubernetes.io/projected/57d7d18e-e52d-417d-b62d-113550d69b79-kube-api-access-9tqm6\") pod \"dnsmasq-dns-57d769cc4f-nw8pm\" (UID: \"57d7d18e-e52d-417d-b62d-113550d69b79\") " pod="openstack/dnsmasq-dns-57d769cc4f-nw8pm" Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.705283 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57d7d18e-e52d-417d-b62d-113550d69b79-config\") pod \"dnsmasq-dns-57d769cc4f-nw8pm\" (UID: \"57d7d18e-e52d-417d-b62d-113550d69b79\") " pod="openstack/dnsmasq-dns-57d769cc4f-nw8pm" Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.705396 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tqm6\" (UniqueName: \"kubernetes.io/projected/57d7d18e-e52d-417d-b62d-113550d69b79-kube-api-access-9tqm6\") pod \"dnsmasq-dns-57d769cc4f-nw8pm\" (UID: \"57d7d18e-e52d-417d-b62d-113550d69b79\") " pod="openstack/dnsmasq-dns-57d769cc4f-nw8pm" Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.705452 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/57d7d18e-e52d-417d-b62d-113550d69b79-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-nw8pm\" (UID: \"57d7d18e-e52d-417d-b62d-113550d69b79\") " pod="openstack/dnsmasq-dns-57d769cc4f-nw8pm" Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.706556 4631 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/57d7d18e-e52d-417d-b62d-113550d69b79-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-nw8pm\" (UID: \"57d7d18e-e52d-417d-b62d-113550d69b79\") " pod="openstack/dnsmasq-dns-57d769cc4f-nw8pm" Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.710640 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57d7d18e-e52d-417d-b62d-113550d69b79-config\") pod \"dnsmasq-dns-57d769cc4f-nw8pm\" (UID: \"57d7d18e-e52d-417d-b62d-113550d69b79\") " pod="openstack/dnsmasq-dns-57d769cc4f-nw8pm" Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.744444 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tqm6\" (UniqueName: \"kubernetes.io/projected/57d7d18e-e52d-417d-b62d-113550d69b79-kube-api-access-9tqm6\") pod \"dnsmasq-dns-57d769cc4f-nw8pm\" (UID: \"57d7d18e-e52d-417d-b62d-113550d69b79\") " pod="openstack/dnsmasq-dns-57d769cc4f-nw8pm" Dec 04 17:48:08 crc kubenswrapper[4631]: I1204 17:48:08.777915 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-nw8pm" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.014660 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-hjd8c"] Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.097905 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-hjd8c" event={"ID":"a3425ef2-08d1-4d79-b36e-03852e0e0750","Type":"ContainerStarted","Data":"f01b9cc2082b3750211068ca70ac93331696bf4ad481f1cfded99982444f977a"} Dec 04 17:48:09 crc kubenswrapper[4631]: W1204 17:48:09.264677 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod57d7d18e_e52d_417d_b62d_113550d69b79.slice/crio-f5d2cd08528f39fd7ce850d3739939d3e19c815e11b40cd371a8c632cac72d26 WatchSource:0}: Error finding container f5d2cd08528f39fd7ce850d3739939d3e19c815e11b40cd371a8c632cac72d26: Status 404 returned error can't find the container with id f5d2cd08528f39fd7ce850d3739939d3e19c815e11b40cd371a8c632cac72d26 Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.266054 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-nw8pm"] Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.273216 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.275209 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.279964 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.280267 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.281140 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-hdwpp" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.281266 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.281542 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.281878 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.282117 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.298381 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.436497 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a71b38c4-ee61-49f2-8c8c-5adc05df2159-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.436580 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.437387 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjrwd\" (UniqueName: \"kubernetes.io/projected/a71b38c4-ee61-49f2-8c8c-5adc05df2159-kube-api-access-gjrwd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.437423 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a71b38c4-ee61-49f2-8c8c-5adc05df2159-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.437444 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.437466 4631 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.437494 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a71b38c4-ee61-49f2-8c8c-5adc05df2159-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.437517 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.437545 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a71b38c4-ee61-49f2-8c8c-5adc05df2159-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.437562 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.437588 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a71b38c4-ee61-49f2-8c8c-5adc05df2159-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.538236 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjrwd\" (UniqueName: \"kubernetes.io/projected/a71b38c4-ee61-49f2-8c8c-5adc05df2159-kube-api-access-gjrwd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.538286 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a71b38c4-ee61-49f2-8c8c-5adc05df2159-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.538306 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.538341 4631 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.538440 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a71b38c4-ee61-49f2-8c8c-5adc05df2159-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.538461 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.538481 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a71b38c4-ee61-49f2-8c8c-5adc05df2159-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.538495 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.538516 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a71b38c4-ee61-49f2-8c8c-5adc05df2159-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.538542 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a71b38c4-ee61-49f2-8c8c-5adc05df2159-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.538572 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.542613 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a71b38c4-ee61-49f2-8c8c-5adc05df2159-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.542745 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-erlang-cookie\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.542937 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.543028 4631 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.543633 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a71b38c4-ee61-49f2-8c8c-5adc05df2159-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.543698 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a71b38c4-ee61-49f2-8c8c-5adc05df2159-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.545540 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a71b38c4-ee61-49f2-8c8c-5adc05df2159-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.551809 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.562508 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.569064 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a71b38c4-ee61-49f2-8c8c-5adc05df2159-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.577759 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjrwd\" (UniqueName: \"kubernetes.io/projected/a71b38c4-ee61-49f2-8c8c-5adc05df2159-kube-api-access-gjrwd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.585808 4631 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.587641 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.591061 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.591348 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-rtjh5" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.591491 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.591690 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.591819 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.591963 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.592138 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.592465 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.622636 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.630444 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.742437 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.742508 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.742534 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.742586 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.742602 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rq66d\" (UniqueName: \"kubernetes.io/projected/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-kube-api-access-rq66d\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.742617 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-server-conf\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.742663 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.742704 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-config-data\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.742751 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: 
\"kubernetes.io/downward-api/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-pod-info\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.742769 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.742819 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.844427 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-config-data\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.844492 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-pod-info\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.844519 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.844551 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.844617 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.844640 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.844667 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 
17:48:09.844696 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.844716 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rq66d\" (UniqueName: \"kubernetes.io/projected/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-kube-api-access-rq66d\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.844735 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-server-conf\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.844824 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.845446 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-config-data\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.845713 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.845844 4631 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.849117 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.849407 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.849835 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-pod-info\") pod \"rabbitmq-server-0\" (UID: 
\"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.850106 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.851352 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.852226 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-server-conf\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.857082 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.871524 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rq66d\" (UniqueName: \"kubernetes.io/projected/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-kube-api-access-rq66d\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.878274 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " pod="openstack/rabbitmq-server-0" Dec 04 17:48:09 crc kubenswrapper[4631]: I1204 17:48:09.951040 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 04 17:48:10 crc kubenswrapper[4631]: I1204 17:48:10.119900 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-nw8pm" event={"ID":"57d7d18e-e52d-417d-b62d-113550d69b79","Type":"ContainerStarted","Data":"f5d2cd08528f39fd7ce850d3739939d3e19c815e11b40cd371a8c632cac72d26"} Dec 04 17:48:10 crc kubenswrapper[4631]: I1204 17:48:10.222951 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 04 17:48:10 crc kubenswrapper[4631]: W1204 17:48:10.305280 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda71b38c4_ee61_49f2_8c8c_5adc05df2159.slice/crio-75cad5d58e51c5c945c6d86a2c7840ab5d7b0a339cb9675861a9df32f488a608 WatchSource:0}: Error finding container 75cad5d58e51c5c945c6d86a2c7840ab5d7b0a339cb9675861a9df32f488a608: Status 404 returned error can't find the container with id 75cad5d58e51c5c945c6d86a2c7840ab5d7b0a339cb9675861a9df32f488a608 Dec 04 17:48:10 crc kubenswrapper[4631]: I1204 17:48:10.348557 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 04 17:48:10 crc kubenswrapper[4631]: W1204 17:48:10.389483 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2ad48d12_6e35_428b_ac2e_ee6c2cf668ef.slice/crio-198eb62e4f98d1676b218676effd75697bf876b08d34c56bbf9c4a8293467834 WatchSource:0}: Error finding container 198eb62e4f98d1676b218676effd75697bf876b08d34c56bbf9c4a8293467834: Status 404 returned error can't find the container with id 198eb62e4f98d1676b218676effd75697bf876b08d34c56bbf9c4a8293467834 Dec 04 17:48:10 crc kubenswrapper[4631]: I1204 17:48:10.917498 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 04 17:48:10 crc kubenswrapper[4631]: I1204 17:48:10.920924 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 04 17:48:10 crc kubenswrapper[4631]: I1204 17:48:10.923815 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-w8xjz" Dec 04 17:48:10 crc kubenswrapper[4631]: I1204 17:48:10.923882 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 04 17:48:10 crc kubenswrapper[4631]: I1204 17:48:10.923940 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 04 17:48:10 crc kubenswrapper[4631]: I1204 17:48:10.923891 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 04 17:48:10 crc kubenswrapper[4631]: I1204 17:48:10.935350 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 04 17:48:10 crc kubenswrapper[4631]: I1204 17:48:10.946658 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.066059 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0c7df533-7298-4204-aeca-992631c9ccb6-config-data-default\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.066103 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0c7df533-7298-4204-aeca-992631c9ccb6-kolla-config\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.066313 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56psv\" (UniqueName: \"kubernetes.io/projected/0c7df533-7298-4204-aeca-992631c9ccb6-kube-api-access-56psv\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.066415 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.066468 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c7df533-7298-4204-aeca-992631c9ccb6-operator-scripts\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.066535 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0c7df533-7298-4204-aeca-992631c9ccb6-config-data-generated\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.066561 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c7df533-7298-4204-aeca-992631c9ccb6-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.066644 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c7df533-7298-4204-aeca-992631c9ccb6-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.168092 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56psv\" (UniqueName: \"kubernetes.io/projected/0c7df533-7298-4204-aeca-992631c9ccb6-kube-api-access-56psv\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.168148 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.168168 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c7df533-7298-4204-aeca-992631c9ccb6-operator-scripts\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.168192 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0c7df533-7298-4204-aeca-992631c9ccb6-config-data-generated\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.168210 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c7df533-7298-4204-aeca-992631c9ccb6-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.168233 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c7df533-7298-4204-aeca-992631c9ccb6-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.168272 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0c7df533-7298-4204-aeca-992631c9ccb6-config-data-default\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.168293 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0c7df533-7298-4204-aeca-992631c9ccb6-kolla-config\") pod \"openstack-galera-0\" (UID: 
\"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.168663 4631 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.169108 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0c7df533-7298-4204-aeca-992631c9ccb6-kolla-config\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.170045 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0c7df533-7298-4204-aeca-992631c9ccb6-config-data-generated\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.171189 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0c7df533-7298-4204-aeca-992631c9ccb6-config-data-default\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.173300 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c7df533-7298-4204-aeca-992631c9ccb6-operator-scripts\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.181417 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c7df533-7298-4204-aeca-992631c9ccb6-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.181414 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c7df533-7298-4204-aeca-992631c9ccb6-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.207425 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.215697 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56psv\" (UniqueName: \"kubernetes.io/projected/0c7df533-7298-4204-aeca-992631c9ccb6-kube-api-access-56psv\") pod \"openstack-galera-0\" (UID: \"0c7df533-7298-4204-aeca-992631c9ccb6\") " pod="openstack/openstack-galera-0" Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.218804 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef","Type":"ContainerStarted","Data":"198eb62e4f98d1676b218676effd75697bf876b08d34c56bbf9c4a8293467834"} Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.220891 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a71b38c4-ee61-49f2-8c8c-5adc05df2159","Type":"ContainerStarted","Data":"75cad5d58e51c5c945c6d86a2c7840ab5d7b0a339cb9675861a9df32f488a608"} Dec 04 17:48:11 crc kubenswrapper[4631]: I1204 17:48:11.248568 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.208605 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.210099 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.218054 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-qldtf" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.218240 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.218433 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.218652 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.312911 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.389097 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4c1e28f2-5820-4e06-a20b-a9062d8280be-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.389209 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c1e28f2-5820-4e06-a20b-a9062d8280be-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.389263 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.389286 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbgvq\" (UniqueName: \"kubernetes.io/projected/4c1e28f2-5820-4e06-a20b-a9062d8280be-kube-api-access-bbgvq\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.389345 4631 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c1e28f2-5820-4e06-a20b-a9062d8280be-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.389388 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4c1e28f2-5820-4e06-a20b-a9062d8280be-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.389419 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c1e28f2-5820-4e06-a20b-a9062d8280be-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.389449 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4c1e28f2-5820-4e06-a20b-a9062d8280be-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.491076 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c1e28f2-5820-4e06-a20b-a9062d8280be-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.491139 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.491156 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbgvq\" (UniqueName: \"kubernetes.io/projected/4c1e28f2-5820-4e06-a20b-a9062d8280be-kube-api-access-bbgvq\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.491203 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c1e28f2-5820-4e06-a20b-a9062d8280be-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.491224 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4c1e28f2-5820-4e06-a20b-a9062d8280be-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.491364 4631 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c1e28f2-5820-4e06-a20b-a9062d8280be-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.491590 4631 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.491861 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4c1e28f2-5820-4e06-a20b-a9062d8280be-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.492208 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4c1e28f2-5820-4e06-a20b-a9062d8280be-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.493758 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c1e28f2-5820-4e06-a20b-a9062d8280be-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.493804 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4c1e28f2-5820-4e06-a20b-a9062d8280be-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.493850 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4c1e28f2-5820-4e06-a20b-a9062d8280be-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.494293 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4c1e28f2-5820-4e06-a20b-a9062d8280be-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.499159 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c1e28f2-5820-4e06-a20b-a9062d8280be-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.509659 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbgvq\" (UniqueName: 
\"kubernetes.io/projected/4c1e28f2-5820-4e06-a20b-a9062d8280be-kube-api-access-bbgvq\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.514399 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c1e28f2-5820-4e06-a20b-a9062d8280be-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.536626 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"4c1e28f2-5820-4e06-a20b-a9062d8280be\") " pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.552452 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.783279 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.784613 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.792568 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.793102 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.793229 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-rh9cq" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.804905 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e136634f-2944-42c8-bd08-517411c92754-combined-ca-bundle\") pod \"memcached-0\" (UID: \"e136634f-2944-42c8-bd08-517411c92754\") " pod="openstack/memcached-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.804956 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdcvx\" (UniqueName: \"kubernetes.io/projected/e136634f-2944-42c8-bd08-517411c92754-kube-api-access-cdcvx\") pod \"memcached-0\" (UID: \"e136634f-2944-42c8-bd08-517411c92754\") " pod="openstack/memcached-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.805054 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/e136634f-2944-42c8-bd08-517411c92754-memcached-tls-certs\") pod \"memcached-0\" (UID: \"e136634f-2944-42c8-bd08-517411c92754\") " pod="openstack/memcached-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.805289 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e136634f-2944-42c8-bd08-517411c92754-config-data\") pod \"memcached-0\" (UID: \"e136634f-2944-42c8-bd08-517411c92754\") " pod="openstack/memcached-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.805419 4631 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e136634f-2944-42c8-bd08-517411c92754-kolla-config\") pod \"memcached-0\" (UID: \"e136634f-2944-42c8-bd08-517411c92754\") " pod="openstack/memcached-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.809124 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.909335 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e136634f-2944-42c8-bd08-517411c92754-combined-ca-bundle\") pod \"memcached-0\" (UID: \"e136634f-2944-42c8-bd08-517411c92754\") " pod="openstack/memcached-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.909417 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdcvx\" (UniqueName: \"kubernetes.io/projected/e136634f-2944-42c8-bd08-517411c92754-kube-api-access-cdcvx\") pod \"memcached-0\" (UID: \"e136634f-2944-42c8-bd08-517411c92754\") " pod="openstack/memcached-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.909896 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/e136634f-2944-42c8-bd08-517411c92754-memcached-tls-certs\") pod \"memcached-0\" (UID: \"e136634f-2944-42c8-bd08-517411c92754\") " pod="openstack/memcached-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.909949 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e136634f-2944-42c8-bd08-517411c92754-config-data\") pod \"memcached-0\" (UID: \"e136634f-2944-42c8-bd08-517411c92754\") " pod="openstack/memcached-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.909988 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e136634f-2944-42c8-bd08-517411c92754-kolla-config\") pod \"memcached-0\" (UID: \"e136634f-2944-42c8-bd08-517411c92754\") " pod="openstack/memcached-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.912709 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e136634f-2944-42c8-bd08-517411c92754-kolla-config\") pod \"memcached-0\" (UID: \"e136634f-2944-42c8-bd08-517411c92754\") " pod="openstack/memcached-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.913518 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e136634f-2944-42c8-bd08-517411c92754-config-data\") pod \"memcached-0\" (UID: \"e136634f-2944-42c8-bd08-517411c92754\") " pod="openstack/memcached-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.914922 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/e136634f-2944-42c8-bd08-517411c92754-memcached-tls-certs\") pod \"memcached-0\" (UID: \"e136634f-2944-42c8-bd08-517411c92754\") " pod="openstack/memcached-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.935078 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e136634f-2944-42c8-bd08-517411c92754-combined-ca-bundle\") pod \"memcached-0\" (UID: 
\"e136634f-2944-42c8-bd08-517411c92754\") " pod="openstack/memcached-0" Dec 04 17:48:12 crc kubenswrapper[4631]: I1204 17:48:12.965713 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdcvx\" (UniqueName: \"kubernetes.io/projected/e136634f-2944-42c8-bd08-517411c92754-kube-api-access-cdcvx\") pod \"memcached-0\" (UID: \"e136634f-2944-42c8-bd08-517411c92754\") " pod="openstack/memcached-0" Dec 04 17:48:13 crc kubenswrapper[4631]: I1204 17:48:13.118447 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 04 17:48:14 crc kubenswrapper[4631]: I1204 17:48:14.969170 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 04 17:48:14 crc kubenswrapper[4631]: I1204 17:48:14.972173 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 04 17:48:14 crc kubenswrapper[4631]: I1204 17:48:14.976185 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-6j52l" Dec 04 17:48:14 crc kubenswrapper[4631]: I1204 17:48:14.983364 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 04 17:48:15 crc kubenswrapper[4631]: I1204 17:48:15.163135 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfw4d\" (UniqueName: \"kubernetes.io/projected/f017066e-42ab-4e68-891d-5df98da845a9-kube-api-access-gfw4d\") pod \"kube-state-metrics-0\" (UID: \"f017066e-42ab-4e68-891d-5df98da845a9\") " pod="openstack/kube-state-metrics-0" Dec 04 17:48:15 crc kubenswrapper[4631]: I1204 17:48:15.264933 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfw4d\" (UniqueName: \"kubernetes.io/projected/f017066e-42ab-4e68-891d-5df98da845a9-kube-api-access-gfw4d\") pod \"kube-state-metrics-0\" (UID: \"f017066e-42ab-4e68-891d-5df98da845a9\") " pod="openstack/kube-state-metrics-0" Dec 04 17:48:15 crc kubenswrapper[4631]: I1204 17:48:15.287814 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfw4d\" (UniqueName: \"kubernetes.io/projected/f017066e-42ab-4e68-891d-5df98da845a9-kube-api-access-gfw4d\") pod \"kube-state-metrics-0\" (UID: \"f017066e-42ab-4e68-891d-5df98da845a9\") " pod="openstack/kube-state-metrics-0" Dec 04 17:48:15 crc kubenswrapper[4631]: I1204 17:48:15.305895 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 04 17:48:17 crc kubenswrapper[4631]: I1204 17:48:17.885261 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-2vnfm"] Dec 04 17:48:17 crc kubenswrapper[4631]: I1204 17:48:17.886755 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:17 crc kubenswrapper[4631]: I1204 17:48:17.890829 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-2hwch" Dec 04 17:48:17 crc kubenswrapper[4631]: I1204 17:48:17.891384 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 04 17:48:17 crc kubenswrapper[4631]: I1204 17:48:17.891542 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 04 17:48:17 crc kubenswrapper[4631]: I1204 17:48:17.900540 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-2vnfm"] Dec 04 17:48:17 crc kubenswrapper[4631]: I1204 17:48:17.910459 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-gsp75"] Dec 04 17:48:17 crc kubenswrapper[4631]: I1204 17:48:17.911964 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-gsp75" Dec 04 17:48:17 crc kubenswrapper[4631]: I1204 17:48:17.934026 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-gsp75"] Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.008402 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/10032f10-bb41-4039-a44d-ca336b45d4df-var-log-ovn\") pod \"ovn-controller-2vnfm\" (UID: \"10032f10-bb41-4039-a44d-ca336b45d4df\") " pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.008445 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8d34f815-7011-438b-8c8c-45363f359101-etc-ovs\") pod \"ovn-controller-ovs-gsp75\" (UID: \"8d34f815-7011-438b-8c8c-45363f359101\") " pod="openstack/ovn-controller-ovs-gsp75" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.008467 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/10032f10-bb41-4039-a44d-ca336b45d4df-var-run-ovn\") pod \"ovn-controller-2vnfm\" (UID: \"10032f10-bb41-4039-a44d-ca336b45d4df\") " pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.008486 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8d34f815-7011-438b-8c8c-45363f359101-var-log\") pod \"ovn-controller-ovs-gsp75\" (UID: \"8d34f815-7011-438b-8c8c-45363f359101\") " pod="openstack/ovn-controller-ovs-gsp75" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.008499 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/8d34f815-7011-438b-8c8c-45363f359101-var-lib\") pod \"ovn-controller-ovs-gsp75\" (UID: \"8d34f815-7011-438b-8c8c-45363f359101\") " pod="openstack/ovn-controller-ovs-gsp75" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.008517 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7wph\" (UniqueName: \"kubernetes.io/projected/10032f10-bb41-4039-a44d-ca336b45d4df-kube-api-access-f7wph\") pod \"ovn-controller-2vnfm\" (UID: \"10032f10-bb41-4039-a44d-ca336b45d4df\") " 
pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.008551 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/10032f10-bb41-4039-a44d-ca336b45d4df-ovn-controller-tls-certs\") pod \"ovn-controller-2vnfm\" (UID: \"10032f10-bb41-4039-a44d-ca336b45d4df\") " pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.008573 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/10032f10-bb41-4039-a44d-ca336b45d4df-var-run\") pod \"ovn-controller-2vnfm\" (UID: \"10032f10-bb41-4039-a44d-ca336b45d4df\") " pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.008593 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8d34f815-7011-438b-8c8c-45363f359101-scripts\") pod \"ovn-controller-ovs-gsp75\" (UID: \"8d34f815-7011-438b-8c8c-45363f359101\") " pod="openstack/ovn-controller-ovs-gsp75" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.008607 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/10032f10-bb41-4039-a44d-ca336b45d4df-scripts\") pod \"ovn-controller-2vnfm\" (UID: \"10032f10-bb41-4039-a44d-ca336b45d4df\") " pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.008623 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8d34f815-7011-438b-8c8c-45363f359101-var-run\") pod \"ovn-controller-ovs-gsp75\" (UID: \"8d34f815-7011-438b-8c8c-45363f359101\") " pod="openstack/ovn-controller-ovs-gsp75" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.008643 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5w6mj\" (UniqueName: \"kubernetes.io/projected/8d34f815-7011-438b-8c8c-45363f359101-kube-api-access-5w6mj\") pod \"ovn-controller-ovs-gsp75\" (UID: \"8d34f815-7011-438b-8c8c-45363f359101\") " pod="openstack/ovn-controller-ovs-gsp75" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.008666 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10032f10-bb41-4039-a44d-ca336b45d4df-combined-ca-bundle\") pod \"ovn-controller-2vnfm\" (UID: \"10032f10-bb41-4039-a44d-ca336b45d4df\") " pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.109802 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/10032f10-bb41-4039-a44d-ca336b45d4df-var-log-ovn\") pod \"ovn-controller-2vnfm\" (UID: \"10032f10-bb41-4039-a44d-ca336b45d4df\") " pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.109866 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8d34f815-7011-438b-8c8c-45363f359101-etc-ovs\") pod \"ovn-controller-ovs-gsp75\" (UID: \"8d34f815-7011-438b-8c8c-45363f359101\") " pod="openstack/ovn-controller-ovs-gsp75" Dec 04 17:48:18 crc 
kubenswrapper[4631]: I1204 17:48:18.109891 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/10032f10-bb41-4039-a44d-ca336b45d4df-var-run-ovn\") pod \"ovn-controller-2vnfm\" (UID: \"10032f10-bb41-4039-a44d-ca336b45d4df\") " pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.109910 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8d34f815-7011-438b-8c8c-45363f359101-var-log\") pod \"ovn-controller-ovs-gsp75\" (UID: \"8d34f815-7011-438b-8c8c-45363f359101\") " pod="openstack/ovn-controller-ovs-gsp75" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.109926 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/8d34f815-7011-438b-8c8c-45363f359101-var-lib\") pod \"ovn-controller-ovs-gsp75\" (UID: \"8d34f815-7011-438b-8c8c-45363f359101\") " pod="openstack/ovn-controller-ovs-gsp75" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.109945 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7wph\" (UniqueName: \"kubernetes.io/projected/10032f10-bb41-4039-a44d-ca336b45d4df-kube-api-access-f7wph\") pod \"ovn-controller-2vnfm\" (UID: \"10032f10-bb41-4039-a44d-ca336b45d4df\") " pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.109975 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/10032f10-bb41-4039-a44d-ca336b45d4df-ovn-controller-tls-certs\") pod \"ovn-controller-2vnfm\" (UID: \"10032f10-bb41-4039-a44d-ca336b45d4df\") " pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.109995 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/10032f10-bb41-4039-a44d-ca336b45d4df-var-run\") pod \"ovn-controller-2vnfm\" (UID: \"10032f10-bb41-4039-a44d-ca336b45d4df\") " pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.110017 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8d34f815-7011-438b-8c8c-45363f359101-scripts\") pod \"ovn-controller-ovs-gsp75\" (UID: \"8d34f815-7011-438b-8c8c-45363f359101\") " pod="openstack/ovn-controller-ovs-gsp75" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.110034 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/10032f10-bb41-4039-a44d-ca336b45d4df-scripts\") pod \"ovn-controller-2vnfm\" (UID: \"10032f10-bb41-4039-a44d-ca336b45d4df\") " pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.110050 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8d34f815-7011-438b-8c8c-45363f359101-var-run\") pod \"ovn-controller-ovs-gsp75\" (UID: \"8d34f815-7011-438b-8c8c-45363f359101\") " pod="openstack/ovn-controller-ovs-gsp75" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.110070 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5w6mj\" (UniqueName: 
\"kubernetes.io/projected/8d34f815-7011-438b-8c8c-45363f359101-kube-api-access-5w6mj\") pod \"ovn-controller-ovs-gsp75\" (UID: \"8d34f815-7011-438b-8c8c-45363f359101\") " pod="openstack/ovn-controller-ovs-gsp75" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.110094 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10032f10-bb41-4039-a44d-ca336b45d4df-combined-ca-bundle\") pod \"ovn-controller-2vnfm\" (UID: \"10032f10-bb41-4039-a44d-ca336b45d4df\") " pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.110399 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/10032f10-bb41-4039-a44d-ca336b45d4df-var-log-ovn\") pod \"ovn-controller-2vnfm\" (UID: \"10032f10-bb41-4039-a44d-ca336b45d4df\") " pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.111141 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/10032f10-bb41-4039-a44d-ca336b45d4df-var-run\") pod \"ovn-controller-2vnfm\" (UID: \"10032f10-bb41-4039-a44d-ca336b45d4df\") " pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.111245 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8d34f815-7011-438b-8c8c-45363f359101-var-log\") pod \"ovn-controller-ovs-gsp75\" (UID: \"8d34f815-7011-438b-8c8c-45363f359101\") " pod="openstack/ovn-controller-ovs-gsp75" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.111570 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/10032f10-bb41-4039-a44d-ca336b45d4df-var-run-ovn\") pod \"ovn-controller-2vnfm\" (UID: \"10032f10-bb41-4039-a44d-ca336b45d4df\") " pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.111589 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/8d34f815-7011-438b-8c8c-45363f359101-var-lib\") pod \"ovn-controller-ovs-gsp75\" (UID: \"8d34f815-7011-438b-8c8c-45363f359101\") " pod="openstack/ovn-controller-ovs-gsp75" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.111618 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8d34f815-7011-438b-8c8c-45363f359101-var-run\") pod \"ovn-controller-ovs-gsp75\" (UID: \"8d34f815-7011-438b-8c8c-45363f359101\") " pod="openstack/ovn-controller-ovs-gsp75" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.111970 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8d34f815-7011-438b-8c8c-45363f359101-etc-ovs\") pod \"ovn-controller-ovs-gsp75\" (UID: \"8d34f815-7011-438b-8c8c-45363f359101\") " pod="openstack/ovn-controller-ovs-gsp75" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.113161 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8d34f815-7011-438b-8c8c-45363f359101-scripts\") pod \"ovn-controller-ovs-gsp75\" (UID: \"8d34f815-7011-438b-8c8c-45363f359101\") " pod="openstack/ovn-controller-ovs-gsp75" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.113206 4631 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/10032f10-bb41-4039-a44d-ca336b45d4df-scripts\") pod \"ovn-controller-2vnfm\" (UID: \"10032f10-bb41-4039-a44d-ca336b45d4df\") " pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.120484 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10032f10-bb41-4039-a44d-ca336b45d4df-combined-ca-bundle\") pod \"ovn-controller-2vnfm\" (UID: \"10032f10-bb41-4039-a44d-ca336b45d4df\") " pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.131193 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5w6mj\" (UniqueName: \"kubernetes.io/projected/8d34f815-7011-438b-8c8c-45363f359101-kube-api-access-5w6mj\") pod \"ovn-controller-ovs-gsp75\" (UID: \"8d34f815-7011-438b-8c8c-45363f359101\") " pod="openstack/ovn-controller-ovs-gsp75" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.132637 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7wph\" (UniqueName: \"kubernetes.io/projected/10032f10-bb41-4039-a44d-ca336b45d4df-kube-api-access-f7wph\") pod \"ovn-controller-2vnfm\" (UID: \"10032f10-bb41-4039-a44d-ca336b45d4df\") " pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.136603 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/10032f10-bb41-4039-a44d-ca336b45d4df-ovn-controller-tls-certs\") pod \"ovn-controller-2vnfm\" (UID: \"10032f10-bb41-4039-a44d-ca336b45d4df\") " pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.207350 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.257691 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-gsp75" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.742149 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.743578 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.747244 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.747577 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.747646 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-22s2t" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.749213 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.753674 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.773125 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.820142 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fe7546ca-3ffc-4d40-b075-00254781f008-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.820216 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fe7546ca-3ffc-4d40-b075-00254781f008-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.820242 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe7546ca-3ffc-4d40-b075-00254781f008-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.820406 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe7546ca-3ffc-4d40-b075-00254781f008-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.820460 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fz5w5\" (UniqueName: \"kubernetes.io/projected/fe7546ca-3ffc-4d40-b075-00254781f008-kube-api-access-fz5w5\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.820619 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe7546ca-3ffc-4d40-b075-00254781f008-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.820685 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe7546ca-3ffc-4d40-b075-00254781f008-config\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.820746 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.922610 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fe7546ca-3ffc-4d40-b075-00254781f008-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.922660 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe7546ca-3ffc-4d40-b075-00254781f008-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.922704 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe7546ca-3ffc-4d40-b075-00254781f008-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.922735 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fz5w5\" (UniqueName: \"kubernetes.io/projected/fe7546ca-3ffc-4d40-b075-00254781f008-kube-api-access-fz5w5\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.922802 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe7546ca-3ffc-4d40-b075-00254781f008-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.922857 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe7546ca-3ffc-4d40-b075-00254781f008-config\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.923412 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.923457 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fe7546ca-3ffc-4d40-b075-00254781f008-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 
17:48:18.923676 4631 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.923714 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fe7546ca-3ffc-4d40-b075-00254781f008-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.923916 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe7546ca-3ffc-4d40-b075-00254781f008-config\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.926401 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fe7546ca-3ffc-4d40-b075-00254781f008-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.933144 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe7546ca-3ffc-4d40-b075-00254781f008-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.935562 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe7546ca-3ffc-4d40-b075-00254781f008-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.936021 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe7546ca-3ffc-4d40-b075-00254781f008-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.944266 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:18 crc kubenswrapper[4631]: I1204 17:48:18.945114 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fz5w5\" (UniqueName: \"kubernetes.io/projected/fe7546ca-3ffc-4d40-b075-00254781f008-kube-api-access-fz5w5\") pod \"ovsdbserver-nb-0\" (UID: \"fe7546ca-3ffc-4d40-b075-00254781f008\") " pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:19 crc kubenswrapper[4631]: I1204 17:48:19.069356 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.053141 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.056472 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.058744 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-tq8wq" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.059437 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.059756 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.060203 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.075267 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.170267 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.170331 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0339f65-9966-4790-a7d2-954145c70f7b-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.170446 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0339f65-9966-4790-a7d2-954145c70f7b-config\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.170530 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4qhp\" (UniqueName: \"kubernetes.io/projected/b0339f65-9966-4790-a7d2-954145c70f7b-kube-api-access-c4qhp\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.170607 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b0339f65-9966-4790-a7d2-954145c70f7b-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.170895 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b0339f65-9966-4790-a7d2-954145c70f7b-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc 
kubenswrapper[4631]: I1204 17:48:22.170949 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b0339f65-9966-4790-a7d2-954145c70f7b-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.170971 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b0339f65-9966-4790-a7d2-954145c70f7b-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.272528 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0339f65-9966-4790-a7d2-954145c70f7b-config\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.272573 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4qhp\" (UniqueName: \"kubernetes.io/projected/b0339f65-9966-4790-a7d2-954145c70f7b-kube-api-access-c4qhp\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.272599 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b0339f65-9966-4790-a7d2-954145c70f7b-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.272619 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b0339f65-9966-4790-a7d2-954145c70f7b-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.272642 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b0339f65-9966-4790-a7d2-954145c70f7b-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.272660 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b0339f65-9966-4790-a7d2-954145c70f7b-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.272699 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.272723 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0339f65-9966-4790-a7d2-954145c70f7b-combined-ca-bundle\") 
pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.273938 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b0339f65-9966-4790-a7d2-954145c70f7b-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.274136 4631 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.274262 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0339f65-9966-4790-a7d2-954145c70f7b-config\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.274497 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b0339f65-9966-4790-a7d2-954145c70f7b-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.278181 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0339f65-9966-4790-a7d2-954145c70f7b-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.286162 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b0339f65-9966-4790-a7d2-954145c70f7b-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.286971 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b0339f65-9966-4790-a7d2-954145c70f7b-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.288799 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4qhp\" (UniqueName: \"kubernetes.io/projected/b0339f65-9966-4790-a7d2-954145c70f7b-kube-api-access-c4qhp\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.293237 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b0339f65-9966-4790-a7d2-954145c70f7b\") " pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:22 crc kubenswrapper[4631]: I1204 17:48:22.416203 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:27 crc kubenswrapper[4631]: E1204 17:48:27.397749 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 04 17:48:27 crc kubenswrapper[4631]: E1204 17:48:27.398352 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9tqm6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-nw8pm_openstack(57d7d18e-e52d-417d-b62d-113550d69b79): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 17:48:27 crc kubenswrapper[4631]: E1204 17:48:27.399829 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-nw8pm" podUID="57d7d18e-e52d-417d-b62d-113550d69b79" Dec 04 17:48:27 crc kubenswrapper[4631]: E1204 17:48:27.483121 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 04 17:48:27 crc kubenswrapper[4631]: E1204 17:48:27.483248 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nfdh5dfhb6h64h676hc4h78h97h669h54chfbh696hb5h54bh5d4h6bh64h644h677h584h5cbh698h9dh5bbh5f8h5b8hcdh644h5c7h694hbfh589q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-spfvb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5ccc8479f9-hjd8c_openstack(a3425ef2-08d1-4d79-b36e-03852e0e0750): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 17:48:27 crc kubenswrapper[4631]: E1204 17:48:27.485226 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5ccc8479f9-hjd8c" podUID="a3425ef2-08d1-4d79-b36e-03852e0e0750" Dec 04 17:48:27 crc kubenswrapper[4631]: E1204 17:48:27.524990 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 04 17:48:27 crc kubenswrapper[4631]: E1204 17:48:27.525204 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4rgqg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-4xx6b_openstack(b927f9cb-62ad-4b2d-99a6-4309bccc7ecf): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 17:48:27 crc kubenswrapper[4631]: E1204 17:48:27.526671 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-4xx6b" podUID="b927f9cb-62ad-4b2d-99a6-4309bccc7ecf" Dec 04 17:48:27 crc kubenswrapper[4631]: E1204 17:48:27.583691 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 04 17:48:27 crc kubenswrapper[4631]: E1204 17:48:27.583856 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-btgr8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-lbfcv_openstack(7ba62723-e502-40e0-bca9-57f552d5a715): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 17:48:27 crc kubenswrapper[4631]: E1204 17:48:27.585066 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-lbfcv" podUID="7ba62723-e502-40e0-bca9-57f552d5a715" Dec 04 17:48:27 crc kubenswrapper[4631]: I1204 17:48:27.770865 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 04 17:48:28 crc kubenswrapper[4631]: E1204 17:48:28.410567 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-5ccc8479f9-hjd8c" podUID="a3425ef2-08d1-4d79-b36e-03852e0e0750" Dec 04 17:48:28 crc kubenswrapper[4631]: E1204 17:48:28.410906 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-nw8pm" podUID="57d7d18e-e52d-417d-b62d-113550d69b79" Dec 04 17:48:28 crc kubenswrapper[4631]: W1204 17:48:28.729067 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode136634f_2944_42c8_bd08_517411c92754.slice/crio-d164267b85676b151a8c2cffdde75a8dd9bb6a9dd1cbf48d0354a73c1721ef56 WatchSource:0}: Error finding container 
d164267b85676b151a8c2cffdde75a8dd9bb6a9dd1cbf48d0354a73c1721ef56: Status 404 returned error can't find the container with id d164267b85676b151a8c2cffdde75a8dd9bb6a9dd1cbf48d0354a73c1721ef56 Dec 04 17:48:28 crc kubenswrapper[4631]: I1204 17:48:28.899537 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-4xx6b" Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.013993 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b927f9cb-62ad-4b2d-99a6-4309bccc7ecf-config\") pod \"b927f9cb-62ad-4b2d-99a6-4309bccc7ecf\" (UID: \"b927f9cb-62ad-4b2d-99a6-4309bccc7ecf\") " Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.014052 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rgqg\" (UniqueName: \"kubernetes.io/projected/b927f9cb-62ad-4b2d-99a6-4309bccc7ecf-kube-api-access-4rgqg\") pod \"b927f9cb-62ad-4b2d-99a6-4309bccc7ecf\" (UID: \"b927f9cb-62ad-4b2d-99a6-4309bccc7ecf\") " Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.015555 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b927f9cb-62ad-4b2d-99a6-4309bccc7ecf-config" (OuterVolumeSpecName: "config") pod "b927f9cb-62ad-4b2d-99a6-4309bccc7ecf" (UID: "b927f9cb-62ad-4b2d-99a6-4309bccc7ecf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.019613 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-lbfcv" Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.021792 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b927f9cb-62ad-4b2d-99a6-4309bccc7ecf-kube-api-access-4rgqg" (OuterVolumeSpecName: "kube-api-access-4rgqg") pod "b927f9cb-62ad-4b2d-99a6-4309bccc7ecf" (UID: "b927f9cb-62ad-4b2d-99a6-4309bccc7ecf"). InnerVolumeSpecName "kube-api-access-4rgqg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.116278 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ba62723-e502-40e0-bca9-57f552d5a715-dns-svc\") pod \"7ba62723-e502-40e0-bca9-57f552d5a715\" (UID: \"7ba62723-e502-40e0-bca9-57f552d5a715\") " Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.116376 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btgr8\" (UniqueName: \"kubernetes.io/projected/7ba62723-e502-40e0-bca9-57f552d5a715-kube-api-access-btgr8\") pod \"7ba62723-e502-40e0-bca9-57f552d5a715\" (UID: \"7ba62723-e502-40e0-bca9-57f552d5a715\") " Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.116495 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ba62723-e502-40e0-bca9-57f552d5a715-config\") pod \"7ba62723-e502-40e0-bca9-57f552d5a715\" (UID: \"7ba62723-e502-40e0-bca9-57f552d5a715\") " Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.116815 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b927f9cb-62ad-4b2d-99a6-4309bccc7ecf-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.116830 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rgqg\" (UniqueName: \"kubernetes.io/projected/b927f9cb-62ad-4b2d-99a6-4309bccc7ecf-kube-api-access-4rgqg\") on node \"crc\" DevicePath \"\"" Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.117427 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ba62723-e502-40e0-bca9-57f552d5a715-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7ba62723-e502-40e0-bca9-57f552d5a715" (UID: "7ba62723-e502-40e0-bca9-57f552d5a715"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.117544 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ba62723-e502-40e0-bca9-57f552d5a715-config" (OuterVolumeSpecName: "config") pod "7ba62723-e502-40e0-bca9-57f552d5a715" (UID: "7ba62723-e502-40e0-bca9-57f552d5a715"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.120062 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ba62723-e502-40e0-bca9-57f552d5a715-kube-api-access-btgr8" (OuterVolumeSpecName: "kube-api-access-btgr8") pod "7ba62723-e502-40e0-bca9-57f552d5a715" (UID: "7ba62723-e502-40e0-bca9-57f552d5a715"). InnerVolumeSpecName "kube-api-access-btgr8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.218571 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btgr8\" (UniqueName: \"kubernetes.io/projected/7ba62723-e502-40e0-bca9-57f552d5a715-kube-api-access-btgr8\") on node \"crc\" DevicePath \"\"" Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.218606 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ba62723-e502-40e0-bca9-57f552d5a715-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.218616 4631 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ba62723-e502-40e0-bca9-57f552d5a715-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.261039 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.353511 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 04 17:48:29 crc kubenswrapper[4631]: W1204 17:48:29.354609 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0c7df533_7298_4204_aeca_992631c9ccb6.slice/crio-2b7e3c9b1ff3804e8680ec8a0e6003f92a367e51478ee68ff0576b111c940adc WatchSource:0}: Error finding container 2b7e3c9b1ff3804e8680ec8a0e6003f92a367e51478ee68ff0576b111c940adc: Status 404 returned error can't find the container with id 2b7e3c9b1ff3804e8680ec8a0e6003f92a367e51478ee68ff0576b111c940adc Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.361600 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.401538 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-lbfcv" event={"ID":"7ba62723-e502-40e0-bca9-57f552d5a715","Type":"ContainerDied","Data":"629a4cd4940bfc4d0fec624b4b6500b7352c782fb777e87c61dd0df1ced35833"} Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.401658 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-lbfcv" Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.404977 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4c1e28f2-5820-4e06-a20b-a9062d8280be","Type":"ContainerStarted","Data":"6175dc2ddc2acc638941d6bb4be1448062e4af4d934858a35423c94031f480e5"} Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.405962 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"e136634f-2944-42c8-bd08-517411c92754","Type":"ContainerStarted","Data":"d164267b85676b151a8c2cffdde75a8dd9bb6a9dd1cbf48d0354a73c1721ef56"} Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.407043 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f017066e-42ab-4e68-891d-5df98da845a9","Type":"ContainerStarted","Data":"ef99ad66b5c7c1d061580f8b49ae0c05977d3450614e7fd77fa196592afbff06"} Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.408652 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"0c7df533-7298-4204-aeca-992631c9ccb6","Type":"ContainerStarted","Data":"2b7e3c9b1ff3804e8680ec8a0e6003f92a367e51478ee68ff0576b111c940adc"} Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.409810 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-4xx6b" event={"ID":"b927f9cb-62ad-4b2d-99a6-4309bccc7ecf","Type":"ContainerDied","Data":"b9a28874166a139df410cb619fdd7b6571eb2cebb8fe3f4a8d2c80c566578edc"} Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.409912 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-4xx6b" Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.494558 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4xx6b"] Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.499023 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-2vnfm"] Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.539267 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4xx6b"] Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.552022 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-lbfcv"] Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.556572 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-lbfcv"] Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.582999 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 04 17:48:29 crc kubenswrapper[4631]: W1204 17:48:29.586598 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe7546ca_3ffc_4d40_b075_00254781f008.slice/crio-96b2728f739b14c59a23b1e8070a1d8a251ec70a3de2fb64a1b295b594d7a11c WatchSource:0}: Error finding container 96b2728f739b14c59a23b1e8070a1d8a251ec70a3de2fb64a1b295b594d7a11c: Status 404 returned error can't find the container with id 96b2728f739b14c59a23b1e8070a1d8a251ec70a3de2fb64a1b295b594d7a11c Dec 04 17:48:29 crc kubenswrapper[4631]: I1204 17:48:29.638157 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-gsp75"] Dec 04 17:48:29 crc kubenswrapper[4631]: W1204 17:48:29.641773 4631 manager.go:1169] Failed to 
process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8d34f815_7011_438b_8c8c_45363f359101.slice/crio-cfbb3e193c00d4837916d8d7d2f76eed8d7a978e440ecb67883e862a539e73da WatchSource:0}: Error finding container cfbb3e193c00d4837916d8d7d2f76eed8d7a978e440ecb67883e862a539e73da: Status 404 returned error can't find the container with id cfbb3e193c00d4837916d8d7d2f76eed8d7a978e440ecb67883e862a539e73da Dec 04 17:48:30 crc kubenswrapper[4631]: I1204 17:48:30.250296 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ba62723-e502-40e0-bca9-57f552d5a715" path="/var/lib/kubelet/pods/7ba62723-e502-40e0-bca9-57f552d5a715/volumes" Dec 04 17:48:30 crc kubenswrapper[4631]: I1204 17:48:30.251041 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b927f9cb-62ad-4b2d-99a6-4309bccc7ecf" path="/var/lib/kubelet/pods/b927f9cb-62ad-4b2d-99a6-4309bccc7ecf/volumes" Dec 04 17:48:30 crc kubenswrapper[4631]: I1204 17:48:30.419461 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2vnfm" event={"ID":"10032f10-bb41-4039-a44d-ca336b45d4df","Type":"ContainerStarted","Data":"4de2653776ab6b95ffda67ffc94a118ae15118056cd212cf90752677400fc031"} Dec 04 17:48:30 crc kubenswrapper[4631]: I1204 17:48:30.422111 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef","Type":"ContainerStarted","Data":"e833e3040ea35a329a540f414bd27255b574d3b651ea3a7efba64fa3fed817a2"} Dec 04 17:48:30 crc kubenswrapper[4631]: I1204 17:48:30.425190 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-gsp75" event={"ID":"8d34f815-7011-438b-8c8c-45363f359101","Type":"ContainerStarted","Data":"cfbb3e193c00d4837916d8d7d2f76eed8d7a978e440ecb67883e862a539e73da"} Dec 04 17:48:30 crc kubenswrapper[4631]: I1204 17:48:30.426798 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"fe7546ca-3ffc-4d40-b075-00254781f008","Type":"ContainerStarted","Data":"96b2728f739b14c59a23b1e8070a1d8a251ec70a3de2fb64a1b295b594d7a11c"} Dec 04 17:48:30 crc kubenswrapper[4631]: I1204 17:48:30.427904 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a71b38c4-ee61-49f2-8c8c-5adc05df2159","Type":"ContainerStarted","Data":"f3df45d8c13bc0c4ea4338ca7db675f52fb4e8786e9eaea1853623a900588c8c"} Dec 04 17:48:30 crc kubenswrapper[4631]: I1204 17:48:30.546850 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 04 17:48:31 crc kubenswrapper[4631]: I1204 17:48:31.438444 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"b0339f65-9966-4790-a7d2-954145c70f7b","Type":"ContainerStarted","Data":"b14a17aed843ccc7b24a3ddf4940ba451433dda7b634c075ac4d21a901650879"} Dec 04 17:48:39 crc kubenswrapper[4631]: I1204 17:48:39.510533 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"0c7df533-7298-4204-aeca-992631c9ccb6","Type":"ContainerStarted","Data":"486f461d776c5703624911a6fa346714947eeeaf29f3e98ebc2298224df8c54b"} Dec 04 17:48:39 crc kubenswrapper[4631]: I1204 17:48:39.511707 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4c1e28f2-5820-4e06-a20b-a9062d8280be","Type":"ContainerStarted","Data":"1221f349d2a083bfcea0fac3dbad5882622e0a6e81f25aeb58aec79e1148fb4e"} 
Dec 04 17:48:39 crc kubenswrapper[4631]: I1204 17:48:39.513972 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"b0339f65-9966-4790-a7d2-954145c70f7b","Type":"ContainerStarted","Data":"61c129b163bfc9451461d3369466b7b88589dc359d659dfd07ab6013e61c1e8f"} Dec 04 17:48:39 crc kubenswrapper[4631]: I1204 17:48:39.515471 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"e136634f-2944-42c8-bd08-517411c92754","Type":"ContainerStarted","Data":"38de9e7cecc31afb6d0bd21caa96615ee2bc1fe1f392162a5a7de2e9c571fcd3"} Dec 04 17:48:39 crc kubenswrapper[4631]: I1204 17:48:39.515927 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 04 17:48:39 crc kubenswrapper[4631]: I1204 17:48:39.517618 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f017066e-42ab-4e68-891d-5df98da845a9","Type":"ContainerStarted","Data":"75cf2585cda179f2ffdf506784f140fbc680a692784338a16fc4282e00ecd9ff"} Dec 04 17:48:39 crc kubenswrapper[4631]: I1204 17:48:39.517757 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 04 17:48:39 crc kubenswrapper[4631]: I1204 17:48:39.519283 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2vnfm" event={"ID":"10032f10-bb41-4039-a44d-ca336b45d4df","Type":"ContainerStarted","Data":"d3aed39080bbd2124299b78c986b2085b42c1bcb8556533c3fa4453e30e16af6"} Dec 04 17:48:39 crc kubenswrapper[4631]: I1204 17:48:39.519349 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-2vnfm" Dec 04 17:48:39 crc kubenswrapper[4631]: I1204 17:48:39.520728 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-gsp75" event={"ID":"8d34f815-7011-438b-8c8c-45363f359101","Type":"ContainerStarted","Data":"5edbd0f686423440c13c1dfa5d29bb6a7ab696e8b2224d7a48e2e3be75bb2db3"} Dec 04 17:48:39 crc kubenswrapper[4631]: I1204 17:48:39.523166 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"fe7546ca-3ffc-4d40-b075-00254781f008","Type":"ContainerStarted","Data":"3e8661f24916e3eafeb6eb1a9027ae15108cb6fbfb89d7ff57615a550f49f604"} Dec 04 17:48:39 crc kubenswrapper[4631]: I1204 17:48:39.605066 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=24.942447806 podStartE2EDuration="27.605049048s" podCreationTimestamp="2025-12-04 17:48:12 +0000 UTC" firstStartedPulling="2025-12-04 17:48:28.733886665 +0000 UTC m=+1238.766128663" lastFinishedPulling="2025-12-04 17:48:31.396487907 +0000 UTC m=+1241.428729905" observedRunningTime="2025-12-04 17:48:39.593650541 +0000 UTC m=+1249.625892539" watchObservedRunningTime="2025-12-04 17:48:39.605049048 +0000 UTC m=+1249.637291046" Dec 04 17:48:39 crc kubenswrapper[4631]: I1204 17:48:39.657672 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-2vnfm" podStartSLOduration=13.611527396 podStartE2EDuration="22.65765757s" podCreationTimestamp="2025-12-04 17:48:17 +0000 UTC" firstStartedPulling="2025-12-04 17:48:29.527270775 +0000 UTC m=+1239.559512773" lastFinishedPulling="2025-12-04 17:48:38.573400949 +0000 UTC m=+1248.605642947" observedRunningTime="2025-12-04 17:48:39.654317354 +0000 UTC m=+1249.686559352" watchObservedRunningTime="2025-12-04 17:48:39.65765757 +0000 UTC 
m=+1249.689899568" Dec 04 17:48:39 crc kubenswrapper[4631]: I1204 17:48:39.676118 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=16.173865599 podStartE2EDuration="25.676093739s" podCreationTimestamp="2025-12-04 17:48:14 +0000 UTC" firstStartedPulling="2025-12-04 17:48:29.280254362 +0000 UTC m=+1239.312496360" lastFinishedPulling="2025-12-04 17:48:38.782482502 +0000 UTC m=+1248.814724500" observedRunningTime="2025-12-04 17:48:39.669086108 +0000 UTC m=+1249.701328106" watchObservedRunningTime="2025-12-04 17:48:39.676093739 +0000 UTC m=+1249.708335747" Dec 04 17:48:40 crc kubenswrapper[4631]: I1204 17:48:40.536131 4631 generic.go:334] "Generic (PLEG): container finished" podID="8d34f815-7011-438b-8c8c-45363f359101" containerID="5edbd0f686423440c13c1dfa5d29bb6a7ab696e8b2224d7a48e2e3be75bb2db3" exitCode=0 Dec 04 17:48:40 crc kubenswrapper[4631]: I1204 17:48:40.536305 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-gsp75" event={"ID":"8d34f815-7011-438b-8c8c-45363f359101","Type":"ContainerDied","Data":"5edbd0f686423440c13c1dfa5d29bb6a7ab696e8b2224d7a48e2e3be75bb2db3"} Dec 04 17:48:41 crc kubenswrapper[4631]: I1204 17:48:41.549752 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-gsp75" event={"ID":"8d34f815-7011-438b-8c8c-45363f359101","Type":"ContainerStarted","Data":"ead63f2a1d825956d6ffd812f6b1a0b8761c016a4ffe06b73df1f6fbd258ae4e"} Dec 04 17:48:41 crc kubenswrapper[4631]: I1204 17:48:41.550144 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-gsp75" event={"ID":"8d34f815-7011-438b-8c8c-45363f359101","Type":"ContainerStarted","Data":"dbab833a2de536c183515a9caafb51e4830f76ba1c8342cc7966f0915e515b22"} Dec 04 17:48:41 crc kubenswrapper[4631]: I1204 17:48:41.550190 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-gsp75" Dec 04 17:48:41 crc kubenswrapper[4631]: I1204 17:48:41.550213 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-gsp75" Dec 04 17:48:41 crc kubenswrapper[4631]: I1204 17:48:41.581641 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-gsp75" podStartSLOduration=16.322844778 podStartE2EDuration="24.581611744s" podCreationTimestamp="2025-12-04 17:48:17 +0000 UTC" firstStartedPulling="2025-12-04 17:48:29.646523189 +0000 UTC m=+1239.678765177" lastFinishedPulling="2025-12-04 17:48:37.905290145 +0000 UTC m=+1247.937532143" observedRunningTime="2025-12-04 17:48:41.568567119 +0000 UTC m=+1251.600809127" watchObservedRunningTime="2025-12-04 17:48:41.581611744 +0000 UTC m=+1251.613853762" Dec 04 17:48:43 crc kubenswrapper[4631]: I1204 17:48:43.114911 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 04 17:48:43 crc kubenswrapper[4631]: I1204 17:48:43.565884 4631 generic.go:334] "Generic (PLEG): container finished" podID="4c1e28f2-5820-4e06-a20b-a9062d8280be" containerID="1221f349d2a083bfcea0fac3dbad5882622e0a6e81f25aeb58aec79e1148fb4e" exitCode=0 Dec 04 17:48:43 crc kubenswrapper[4631]: I1204 17:48:43.565977 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4c1e28f2-5820-4e06-a20b-a9062d8280be","Type":"ContainerDied","Data":"1221f349d2a083bfcea0fac3dbad5882622e0a6e81f25aeb58aec79e1148fb4e"} Dec 04 17:48:43 crc 
Dec 04 17:48:43 crc kubenswrapper[4631]: I1204 17:48:43.568979 4631 generic.go:334] "Generic (PLEG): container finished" podID="0c7df533-7298-4204-aeca-992631c9ccb6" containerID="486f461d776c5703624911a6fa346714947eeeaf29f3e98ebc2298224df8c54b" exitCode=0
Dec 04 17:48:43 crc kubenswrapper[4631]: I1204 17:48:43.569012 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"0c7df533-7298-4204-aeca-992631c9ccb6","Type":"ContainerDied","Data":"486f461d776c5703624911a6fa346714947eeeaf29f3e98ebc2298224df8c54b"}
Dec 04 17:48:45 crc kubenswrapper[4631]: I1204 17:48:45.316550 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Dec 04 17:48:45 crc kubenswrapper[4631]: I1204 17:48:45.652675 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-nw8pm"]
Dec 04 17:48:45 crc kubenswrapper[4631]: I1204 17:48:45.714863 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-n7wbr"]
Dec 04 17:48:45 crc kubenswrapper[4631]: I1204 17:48:45.717601 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-n7wbr"
Dec 04 17:48:45 crc kubenswrapper[4631]: I1204 17:48:45.778447 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-n7wbr"]
Dec 04 17:48:45 crc kubenswrapper[4631]: I1204 17:48:45.820112 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0a74f92-98dc-437d-8ab1-784e90727a5d-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-n7wbr\" (UID: \"a0a74f92-98dc-437d-8ab1-784e90727a5d\") " pod="openstack/dnsmasq-dns-7cb5889db5-n7wbr"
Dec 04 17:48:45 crc kubenswrapper[4631]: I1204 17:48:45.820189 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99gvq\" (UniqueName: \"kubernetes.io/projected/a0a74f92-98dc-437d-8ab1-784e90727a5d-kube-api-access-99gvq\") pod \"dnsmasq-dns-7cb5889db5-n7wbr\" (UID: \"a0a74f92-98dc-437d-8ab1-784e90727a5d\") " pod="openstack/dnsmasq-dns-7cb5889db5-n7wbr"
Dec 04 17:48:45 crc kubenswrapper[4631]: I1204 17:48:45.820241 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0a74f92-98dc-437d-8ab1-784e90727a5d-config\") pod \"dnsmasq-dns-7cb5889db5-n7wbr\" (UID: \"a0a74f92-98dc-437d-8ab1-784e90727a5d\") " pod="openstack/dnsmasq-dns-7cb5889db5-n7wbr"
Dec 04 17:48:45 crc kubenswrapper[4631]: I1204 17:48:45.921147 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0a74f92-98dc-437d-8ab1-784e90727a5d-config\") pod \"dnsmasq-dns-7cb5889db5-n7wbr\" (UID: \"a0a74f92-98dc-437d-8ab1-784e90727a5d\") " pod="openstack/dnsmasq-dns-7cb5889db5-n7wbr"
Dec 04 17:48:45 crc kubenswrapper[4631]: I1204 17:48:45.921253 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0a74f92-98dc-437d-8ab1-784e90727a5d-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-n7wbr\" (UID: \"a0a74f92-98dc-437d-8ab1-784e90727a5d\") " pod="openstack/dnsmasq-dns-7cb5889db5-n7wbr"
Dec 04 17:48:45 crc kubenswrapper[4631]: I1204 17:48:45.921311 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99gvq\" (UniqueName: \"kubernetes.io/projected/a0a74f92-98dc-437d-8ab1-784e90727a5d-kube-api-access-99gvq\") pod \"dnsmasq-dns-7cb5889db5-n7wbr\" (UID: \"a0a74f92-98dc-437d-8ab1-784e90727a5d\") " pod="openstack/dnsmasq-dns-7cb5889db5-n7wbr"
Dec 04 17:48:45 crc kubenswrapper[4631]: I1204 17:48:45.922393 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0a74f92-98dc-437d-8ab1-784e90727a5d-config\") pod \"dnsmasq-dns-7cb5889db5-n7wbr\" (UID: \"a0a74f92-98dc-437d-8ab1-784e90727a5d\") " pod="openstack/dnsmasq-dns-7cb5889db5-n7wbr"
Dec 04 17:48:45 crc kubenswrapper[4631]: I1204 17:48:45.922933 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0a74f92-98dc-437d-8ab1-784e90727a5d-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-n7wbr\" (UID: \"a0a74f92-98dc-437d-8ab1-784e90727a5d\") " pod="openstack/dnsmasq-dns-7cb5889db5-n7wbr"
Dec 04 17:48:45 crc kubenswrapper[4631]: I1204 17:48:45.940832 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99gvq\" (UniqueName: \"kubernetes.io/projected/a0a74f92-98dc-437d-8ab1-784e90727a5d-kube-api-access-99gvq\") pod \"dnsmasq-dns-7cb5889db5-n7wbr\" (UID: \"a0a74f92-98dc-437d-8ab1-784e90727a5d\") " pod="openstack/dnsmasq-dns-7cb5889db5-n7wbr"
Dec 04 17:48:46 crc kubenswrapper[4631]: I1204 17:48:46.106156 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-n7wbr"
Dec 04 17:48:46 crc kubenswrapper[4631]: I1204 17:48:46.595544 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4c1e28f2-5820-4e06-a20b-a9062d8280be","Type":"ContainerStarted","Data":"dc111b43ac04bcf1335e914678b2036e7492881c2b32ffbdfc48339201f32b65"}
Dec 04 17:48:46 crc kubenswrapper[4631]: I1204 17:48:46.602636 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"0c7df533-7298-4204-aeca-992631c9ccb6","Type":"ContainerStarted","Data":"e1986ea75d00ad5455f024f50b462b16159d937e540d0f573f0d4430617949fe"}
Dec 04 17:48:46 crc kubenswrapper[4631]: I1204 17:48:46.623162 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=26.429980697 podStartE2EDuration="35.623139642s" podCreationTimestamp="2025-12-04 17:48:11 +0000 UTC" firstStartedPulling="2025-12-04 17:48:29.357560162 +0000 UTC m=+1239.389802160" lastFinishedPulling="2025-12-04 17:48:38.550719107 +0000 UTC m=+1248.582961105" observedRunningTime="2025-12-04 17:48:46.619695283 +0000 UTC m=+1256.651937281" watchObservedRunningTime="2025-12-04 17:48:46.623139642 +0000 UTC m=+1256.655381640"
Dec 04 17:48:46 crc kubenswrapper[4631]: I1204 17:48:46.643639 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=28.447100459 podStartE2EDuration="37.64362273s" podCreationTimestamp="2025-12-04 17:48:09 +0000 UTC" firstStartedPulling="2025-12-04 17:48:29.356322417 +0000 UTC m=+1239.388564415" lastFinishedPulling="2025-12-04 17:48:38.552844688 +0000 UTC m=+1248.585086686" observedRunningTime="2025-12-04 17:48:46.639963475 +0000 UTC m=+1256.672205473" watchObservedRunningTime="2025-12-04 17:48:46.64362273 +0000 UTC m=+1256.675864728"
Dec 04 17:48:46 crc kubenswrapper[4631]: I1204 17:48:46.881419 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-n7wbr"]
Dec 04 17:48:46 crc kubenswrapper[4631]: I1204 17:48:46.894886 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"]
Dec 04 17:48:46 crc kubenswrapper[4631]: I1204 17:48:46.900869 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Dec 04 17:48:46 crc kubenswrapper[4631]: I1204 17:48:46.904138 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-7zw6d"
Dec 04 17:48:46 crc kubenswrapper[4631]: I1204 17:48:46.904144 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data"
Dec 04 17:48:46 crc kubenswrapper[4631]: I1204 17:48:46.904199 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf"
Dec 04 17:48:46 crc kubenswrapper[4631]: I1204 17:48:46.904272 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files"
Dec 04 17:48:46 crc kubenswrapper[4631]: I1204 17:48:46.937613 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.026659 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8acd1342-fa9f-43be-9c9f-28739a5aed78-cache\") pod \"swift-storage-0\" (UID: \"8acd1342-fa9f-43be-9c9f-28739a5aed78\") " pod="openstack/swift-storage-0"
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.026716 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8acd1342-fa9f-43be-9c9f-28739a5aed78-etc-swift\") pod \"swift-storage-0\" (UID: \"8acd1342-fa9f-43be-9c9f-28739a5aed78\") " pod="openstack/swift-storage-0"
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.026750 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"8acd1342-fa9f-43be-9c9f-28739a5aed78\") " pod="openstack/swift-storage-0"
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.026790 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8acd1342-fa9f-43be-9c9f-28739a5aed78-lock\") pod \"swift-storage-0\" (UID: \"8acd1342-fa9f-43be-9c9f-28739a5aed78\") " pod="openstack/swift-storage-0"
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.026814 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9224\" (UniqueName: \"kubernetes.io/projected/8acd1342-fa9f-43be-9c9f-28739a5aed78-kube-api-access-t9224\") pod \"swift-storage-0\" (UID: \"8acd1342-fa9f-43be-9c9f-28739a5aed78\") " pod="openstack/swift-storage-0"
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.128436 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8acd1342-fa9f-43be-9c9f-28739a5aed78-lock\") pod \"swift-storage-0\" (UID: \"8acd1342-fa9f-43be-9c9f-28739a5aed78\") " pod="openstack/swift-storage-0"
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.128516 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9224\" (UniqueName: \"kubernetes.io/projected/8acd1342-fa9f-43be-9c9f-28739a5aed78-kube-api-access-t9224\") pod \"swift-storage-0\" (UID: \"8acd1342-fa9f-43be-9c9f-28739a5aed78\") " pod="openstack/swift-storage-0"
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.128602 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8acd1342-fa9f-43be-9c9f-28739a5aed78-cache\") pod \"swift-storage-0\" (UID: \"8acd1342-fa9f-43be-9c9f-28739a5aed78\") " pod="openstack/swift-storage-0"
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.129041 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8acd1342-fa9f-43be-9c9f-28739a5aed78-etc-swift\") pod \"swift-storage-0\" (UID: \"8acd1342-fa9f-43be-9c9f-28739a5aed78\") " pod="openstack/swift-storage-0"
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.129104 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"8acd1342-fa9f-43be-9c9f-28739a5aed78\") " pod="openstack/swift-storage-0"
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.129120 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8acd1342-fa9f-43be-9c9f-28739a5aed78-cache\") pod \"swift-storage-0\" (UID: \"8acd1342-fa9f-43be-9c9f-28739a5aed78\") " pod="openstack/swift-storage-0"
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.129203 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8acd1342-fa9f-43be-9c9f-28739a5aed78-lock\") pod \"swift-storage-0\" (UID: \"8acd1342-fa9f-43be-9c9f-28739a5aed78\") " pod="openstack/swift-storage-0"
Dec 04 17:48:47 crc kubenswrapper[4631]: E1204 17:48:47.129208 4631 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Dec 04 17:48:47 crc kubenswrapper[4631]: E1204 17:48:47.129439 4631 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Dec 04 17:48:47 crc kubenswrapper[4631]: E1204 17:48:47.129476 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8acd1342-fa9f-43be-9c9f-28739a5aed78-etc-swift podName:8acd1342-fa9f-43be-9c9f-28739a5aed78 nodeName:}" failed. No retries permitted until 2025-12-04 17:48:47.62946271 +0000 UTC m=+1257.661704708 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8acd1342-fa9f-43be-9c9f-28739a5aed78-etc-swift") pod "swift-storage-0" (UID: "8acd1342-fa9f-43be-9c9f-28739a5aed78") : configmap "swift-ring-files" not found
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.129708 4631 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"8acd1342-fa9f-43be-9c9f-28739a5aed78\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/swift-storage-0"
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.151282 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9224\" (UniqueName: \"kubernetes.io/projected/8acd1342-fa9f-43be-9c9f-28739a5aed78-kube-api-access-t9224\") pod \"swift-storage-0\" (UID: \"8acd1342-fa9f-43be-9c9f-28739a5aed78\") " pod="openstack/swift-storage-0"
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.190319 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"8acd1342-fa9f-43be-9c9f-28739a5aed78\") " pod="openstack/swift-storage-0"
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.503221 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-xjxf8"]
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.504153 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-xjxf8"
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.511121 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts"
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.512188 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data"
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.516110 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.529294 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-xjxf8"]
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.610724 4631 generic.go:334] "Generic (PLEG): container finished" podID="a3425ef2-08d1-4d79-b36e-03852e0e0750" containerID="19a53491dd5c779d9b09fdfd9d4d943d1f632b53042f7342d1b5d4a0d3a71801" exitCode=0
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.610786 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-hjd8c" event={"ID":"a3425ef2-08d1-4d79-b36e-03852e0e0750","Type":"ContainerDied","Data":"19a53491dd5c779d9b09fdfd9d4d943d1f632b53042f7342d1b5d4a0d3a71801"}
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.614324 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"b0339f65-9966-4790-a7d2-954145c70f7b","Type":"ContainerStarted","Data":"5b79c85ebc7e949b1446da22dd78208699a3a3035c85cee93595b509c24d20fd"}
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.616552 4631 generic.go:334] "Generic (PLEG): container finished" podID="57d7d18e-e52d-417d-b62d-113550d69b79" containerID="e9447093f96aee5e94e232943f488ffb2fbe2bf860c60d769b9a9f39a5d3e11b" exitCode=0
Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.616608 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-nw8pm" event={"ID":"57d7d18e-e52d-417d-b62d-113550d69b79","Type":"ContainerDied","Data":"e9447093f96aee5e94e232943f488ffb2fbe2bf860c60d769b9a9f39a5d3e11b"}
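Editor's note: the etc-swift mount failure above traces back to one missing object. The projected volume sources the swift-ring-files ConfigMap, which is presumably published once the swift-ring-rebalance-xjxf8 job being scheduled here completes, so swift-storage-0 cannot start until then. A minimal client-go sketch of the same existence check the kubelet is failing (kubeconfig path is an assumption):

    package main

    import (
        "context"
        "fmt"
        "path/filepath"

        apierrors "k8s.io/apimachinery/pkg/api/errors"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
        "k8s.io/client-go/util/homedir"
    )

    // Checks whether the ConfigMap needed by the etc-swift projected volume exists yet.
    func main() {
        kubeconfig := filepath.Join(homedir.HomeDir(), ".kube", "config") // assumed location
        cfg, err := clientcmd.BuildConfigFromFlags("", kubeconfig)
        if err != nil {
            panic(err)
        }
        client := kubernetes.NewForConfigOrDie(cfg)
        _, err = client.CoreV1().ConfigMaps("openstack").Get(context.TODO(), "swift-ring-files", metav1.GetOptions{})
        switch {
        case apierrors.IsNotFound(err):
            fmt.Println("swift-ring-files not published yet; swift-storage-0 stays blocked")
        case err != nil:
            panic(err)
        default:
            fmt.Println("swift-ring-files exists; the kubelet's mount retry should now succeed")
        }
    }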
"SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-nw8pm" event={"ID":"57d7d18e-e52d-417d-b62d-113550d69b79","Type":"ContainerDied","Data":"e9447093f96aee5e94e232943f488ffb2fbe2bf860c60d769b9a9f39a5d3e11b"} Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.622907 4631 generic.go:334] "Generic (PLEG): container finished" podID="a0a74f92-98dc-437d-8ab1-784e90727a5d" containerID="e7095a23c6c6bc6ffab10a8950ad4abbfb1514fe35d2c9c11df22cdee5c02d33" exitCode=0 Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.623000 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-n7wbr" event={"ID":"a0a74f92-98dc-437d-8ab1-784e90727a5d","Type":"ContainerDied","Data":"e7095a23c6c6bc6ffab10a8950ad4abbfb1514fe35d2c9c11df22cdee5c02d33"} Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.623027 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-n7wbr" event={"ID":"a0a74f92-98dc-437d-8ab1-784e90727a5d","Type":"ContainerStarted","Data":"73d30f8eff462021f139d8b625569c891e360719002ad9d29de8be85cdd20c8a"} Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.625066 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"fe7546ca-3ffc-4d40-b075-00254781f008","Type":"ContainerStarted","Data":"4850078039b93e304c4cf5e906f16c17b6a2cb1cae832f6dacee613679cd56f6"} Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.637240 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8acd1342-fa9f-43be-9c9f-28739a5aed78-etc-swift\") pod \"swift-storage-0\" (UID: \"8acd1342-fa9f-43be-9c9f-28739a5aed78\") " pod="openstack/swift-storage-0" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.637307 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-combined-ca-bundle\") pod \"swift-ring-rebalance-xjxf8\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.637335 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-ring-data-devices\") pod \"swift-ring-rebalance-xjxf8\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.637353 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-etc-swift\") pod \"swift-ring-rebalance-xjxf8\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.637395 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-dispersionconf\") pod \"swift-ring-rebalance-xjxf8\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.637410 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"swiftconf\" (UniqueName: \"kubernetes.io/secret/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-swiftconf\") pod \"swift-ring-rebalance-xjxf8\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.637434 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhkz7\" (UniqueName: \"kubernetes.io/projected/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-kube-api-access-mhkz7\") pod \"swift-ring-rebalance-xjxf8\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.637543 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-scripts\") pod \"swift-ring-rebalance-xjxf8\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:48:47 crc kubenswrapper[4631]: E1204 17:48:47.637892 4631 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 04 17:48:47 crc kubenswrapper[4631]: E1204 17:48:47.637977 4631 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 04 17:48:47 crc kubenswrapper[4631]: E1204 17:48:47.638081 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8acd1342-fa9f-43be-9c9f-28739a5aed78-etc-swift podName:8acd1342-fa9f-43be-9c9f-28739a5aed78 nodeName:}" failed. No retries permitted until 2025-12-04 17:48:48.638063133 +0000 UTC m=+1258.670305131 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8acd1342-fa9f-43be-9c9f-28739a5aed78-etc-swift") pod "swift-storage-0" (UID: "8acd1342-fa9f-43be-9c9f-28739a5aed78") : configmap "swift-ring-files" not found Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.743010 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-combined-ca-bundle\") pod \"swift-ring-rebalance-xjxf8\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.743301 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-ring-data-devices\") pod \"swift-ring-rebalance-xjxf8\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.743416 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-etc-swift\") pod \"swift-ring-rebalance-xjxf8\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.743519 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-dispersionconf\") pod \"swift-ring-rebalance-xjxf8\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.743594 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-swiftconf\") pod \"swift-ring-rebalance-xjxf8\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.743683 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhkz7\" (UniqueName: \"kubernetes.io/projected/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-kube-api-access-mhkz7\") pod \"swift-ring-rebalance-xjxf8\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.743858 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-scripts\") pod \"swift-ring-rebalance-xjxf8\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.746574 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-ring-data-devices\") pod \"swift-ring-rebalance-xjxf8\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.747081 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-etc-swift\") 
pod \"swift-ring-rebalance-xjxf8\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.747519 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=11.266401524 podStartE2EDuration="26.747492465s" podCreationTimestamp="2025-12-04 17:48:21 +0000 UTC" firstStartedPulling="2025-12-04 17:48:31.011582305 +0000 UTC m=+1241.043824303" lastFinishedPulling="2025-12-04 17:48:46.492673246 +0000 UTC m=+1256.524915244" observedRunningTime="2025-12-04 17:48:47.707920069 +0000 UTC m=+1257.740162067" watchObservedRunningTime="2025-12-04 17:48:47.747492465 +0000 UTC m=+1257.779734463" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.749180 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-combined-ca-bundle\") pod \"swift-ring-rebalance-xjxf8\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.750935 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-scripts\") pod \"swift-ring-rebalance-xjxf8\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.753850 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-dispersionconf\") pod \"swift-ring-rebalance-xjxf8\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.756446 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=13.677031418 podStartE2EDuration="30.756428292s" podCreationTimestamp="2025-12-04 17:48:17 +0000 UTC" firstStartedPulling="2025-12-04 17:48:29.598084948 +0000 UTC m=+1239.630326946" lastFinishedPulling="2025-12-04 17:48:46.677481822 +0000 UTC m=+1256.709723820" observedRunningTime="2025-12-04 17:48:47.742170152 +0000 UTC m=+1257.774412150" watchObservedRunningTime="2025-12-04 17:48:47.756428292 +0000 UTC m=+1257.788670300" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.758298 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-swiftconf\") pod \"swift-ring-rebalance-xjxf8\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.765051 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhkz7\" (UniqueName: \"kubernetes.io/projected/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-kube-api-access-mhkz7\") pod \"swift-ring-rebalance-xjxf8\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:48:47 crc kubenswrapper[4631]: I1204 17:48:47.819521 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:48:47 crc kubenswrapper[4631]: E1204 17:48:47.911629 4631 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Dec 04 17:48:47 crc kubenswrapper[4631]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/a3425ef2-08d1-4d79-b36e-03852e0e0750/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 04 17:48:47 crc kubenswrapper[4631]: > podSandboxID="f01b9cc2082b3750211068ca70ac93331696bf4ad481f1cfded99982444f977a" Dec 04 17:48:47 crc kubenswrapper[4631]: E1204 17:48:47.911782 4631 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 04 17:48:47 crc kubenswrapper[4631]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nfdh5dfhb6h64h676hc4h78h97h669h54chfbh696hb5h54bh5d4h6bh64h644h677h584h5cbh698h9dh5bbh5f8h5b8hcdh644h5c7h694hbfh589q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-spfvb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5ccc8479f9-hjd8c_openstack(a3425ef2-08d1-4d79-b36e-03852e0e0750): CreateContainerError: container create failed: mount 
`/var/lib/kubelet/pods/a3425ef2-08d1-4d79-b36e-03852e0e0750/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 04 17:48:47 crc kubenswrapper[4631]: > logger="UnhandledError" Dec 04 17:48:47 crc kubenswrapper[4631]: E1204 17:48:47.912908 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/a3425ef2-08d1-4d79-b36e-03852e0e0750/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-5ccc8479f9-hjd8c" podUID="a3425ef2-08d1-4d79-b36e-03852e0e0750" Dec 04 17:48:48 crc kubenswrapper[4631]: I1204 17:48:48.126840 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-nw8pm" Dec 04 17:48:48 crc kubenswrapper[4631]: I1204 17:48:48.259191 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57d7d18e-e52d-417d-b62d-113550d69b79-config\") pod \"57d7d18e-e52d-417d-b62d-113550d69b79\" (UID: \"57d7d18e-e52d-417d-b62d-113550d69b79\") " Dec 04 17:48:48 crc kubenswrapper[4631]: I1204 17:48:48.260591 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tqm6\" (UniqueName: \"kubernetes.io/projected/57d7d18e-e52d-417d-b62d-113550d69b79-kube-api-access-9tqm6\") pod \"57d7d18e-e52d-417d-b62d-113550d69b79\" (UID: \"57d7d18e-e52d-417d-b62d-113550d69b79\") " Dec 04 17:48:48 crc kubenswrapper[4631]: I1204 17:48:48.260670 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/57d7d18e-e52d-417d-b62d-113550d69b79-dns-svc\") pod \"57d7d18e-e52d-417d-b62d-113550d69b79\" (UID: \"57d7d18e-e52d-417d-b62d-113550d69b79\") " Dec 04 17:48:48 crc kubenswrapper[4631]: I1204 17:48:48.304798 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57d7d18e-e52d-417d-b62d-113550d69b79-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "57d7d18e-e52d-417d-b62d-113550d69b79" (UID: "57d7d18e-e52d-417d-b62d-113550d69b79"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:48:48 crc kubenswrapper[4631]: I1204 17:48:48.317645 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57d7d18e-e52d-417d-b62d-113550d69b79-kube-api-access-9tqm6" (OuterVolumeSpecName: "kube-api-access-9tqm6") pod "57d7d18e-e52d-417d-b62d-113550d69b79" (UID: "57d7d18e-e52d-417d-b62d-113550d69b79"). InnerVolumeSpecName "kube-api-access-9tqm6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:48:48 crc kubenswrapper[4631]: I1204 17:48:48.341046 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57d7d18e-e52d-417d-b62d-113550d69b79-config" (OuterVolumeSpecName: "config") pod "57d7d18e-e52d-417d-b62d-113550d69b79" (UID: "57d7d18e-e52d-417d-b62d-113550d69b79"). InnerVolumeSpecName "config". 
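Editor's note: the unhandled-error record above dumps the full container spec of the failing dnsmasq-dns container, including its health checks: both liveness and readiness probes are plain TCP connects against port 5353 (TCPSocketAction). A stand-alone equivalent of that check, useful when reproducing the probe by hand, is just a dial with a timeout; the pod IP below is a placeholder, not taken from the log:

    package main

    import (
        "fmt"
        "net"
        "time"
    )

    // Equivalent of the TCPSocket probe on port 5353 from the container spec above.
    func main() {
        podIP := "10.0.0.1" // placeholder; substitute the pod's status.podIP
        conn, err := net.DialTimeout("tcp", net.JoinHostPort(podIP, "5353"), 5*time.Second)
        if err != nil {
            fmt.Println("probe failed:", err)
            return
        }
        conn.Close()
        fmt.Println("probe ok")
    }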
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:48:48 crc kubenswrapper[4631]: I1204 17:48:48.344579 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-xjxf8"] Dec 04 17:48:48 crc kubenswrapper[4631]: I1204 17:48:48.362988 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57d7d18e-e52d-417d-b62d-113550d69b79-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:48:48 crc kubenswrapper[4631]: I1204 17:48:48.363027 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tqm6\" (UniqueName: \"kubernetes.io/projected/57d7d18e-e52d-417d-b62d-113550d69b79-kube-api-access-9tqm6\") on node \"crc\" DevicePath \"\"" Dec 04 17:48:48 crc kubenswrapper[4631]: I1204 17:48:48.363042 4631 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/57d7d18e-e52d-417d-b62d-113550d69b79-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 17:48:48 crc kubenswrapper[4631]: I1204 17:48:48.634067 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-xjxf8" event={"ID":"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f","Type":"ContainerStarted","Data":"94319f74356e73b79e99ed3530a2a4429a52a01d6ad1f04d7c221274736876d1"} Dec 04 17:48:48 crc kubenswrapper[4631]: I1204 17:48:48.636536 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-nw8pm" Dec 04 17:48:48 crc kubenswrapper[4631]: I1204 17:48:48.636734 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-nw8pm" event={"ID":"57d7d18e-e52d-417d-b62d-113550d69b79","Type":"ContainerDied","Data":"f5d2cd08528f39fd7ce850d3739939d3e19c815e11b40cd371a8c632cac72d26"} Dec 04 17:48:48 crc kubenswrapper[4631]: I1204 17:48:48.636861 4631 scope.go:117] "RemoveContainer" containerID="e9447093f96aee5e94e232943f488ffb2fbe2bf860c60d769b9a9f39a5d3e11b" Dec 04 17:48:48 crc kubenswrapper[4631]: I1204 17:48:48.640320 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-n7wbr" event={"ID":"a0a74f92-98dc-437d-8ab1-784e90727a5d","Type":"ContainerStarted","Data":"bc2f5f110cac5a4388df8e54bc990de802b5fbb18160d2f8fe3a8f86cd6d9d4c"} Dec 04 17:48:48 crc kubenswrapper[4631]: I1204 17:48:48.667723 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8acd1342-fa9f-43be-9c9f-28739a5aed78-etc-swift\") pod \"swift-storage-0\" (UID: \"8acd1342-fa9f-43be-9c9f-28739a5aed78\") " pod="openstack/swift-storage-0" Dec 04 17:48:48 crc kubenswrapper[4631]: E1204 17:48:48.668072 4631 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 04 17:48:48 crc kubenswrapper[4631]: E1204 17:48:48.668107 4631 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 04 17:48:48 crc kubenswrapper[4631]: E1204 17:48:48.668275 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8acd1342-fa9f-43be-9c9f-28739a5aed78-etc-swift podName:8acd1342-fa9f-43be-9c9f-28739a5aed78 nodeName:}" failed. No retries permitted until 2025-12-04 17:48:50.668249733 +0000 UTC m=+1260.700491731 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8acd1342-fa9f-43be-9c9f-28739a5aed78-etc-swift") pod "swift-storage-0" (UID: "8acd1342-fa9f-43be-9c9f-28739a5aed78") : configmap "swift-ring-files" not found Dec 04 17:48:48 crc kubenswrapper[4631]: I1204 17:48:48.670577 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7cb5889db5-n7wbr" podStartSLOduration=3.670555479 podStartE2EDuration="3.670555479s" podCreationTimestamp="2025-12-04 17:48:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:48:48.661542611 +0000 UTC m=+1258.693784619" watchObservedRunningTime="2025-12-04 17:48:48.670555479 +0000 UTC m=+1258.702797487" Dec 04 17:48:48 crc kubenswrapper[4631]: I1204 17:48:48.715916 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-nw8pm"] Dec 04 17:48:48 crc kubenswrapper[4631]: I1204 17:48:48.721217 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-nw8pm"] Dec 04 17:48:49 crc kubenswrapper[4631]: I1204 17:48:49.070940 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:49 crc kubenswrapper[4631]: I1204 17:48:49.071363 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:49 crc kubenswrapper[4631]: I1204 17:48:49.127272 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:49 crc kubenswrapper[4631]: I1204 17:48:49.417395 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:49 crc kubenswrapper[4631]: I1204 17:48:49.463182 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:49 crc kubenswrapper[4631]: I1204 17:48:49.652340 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:49 crc kubenswrapper[4631]: I1204 17:48:49.652392 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7cb5889db5-n7wbr" Dec 04 17:48:49 crc kubenswrapper[4631]: I1204 17:48:49.712772 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 04 17:48:49 crc kubenswrapper[4631]: I1204 17:48:49.714329 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.174735 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-hjd8c"] Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.201562 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c89d5d749-9psdj"] Dec 04 17:48:50 crc kubenswrapper[4631]: E1204 17:48:50.202525 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57d7d18e-e52d-417d-b62d-113550d69b79" containerName="init" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.202615 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="57d7d18e-e52d-417d-b62d-113550d69b79" containerName="init" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.202858 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="57d7d18e-e52d-417d-b62d-113550d69b79" 
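Editor's note on the retry cadence: across the repeated etc-swift failures, durationBeforeRetry was 500ms on the first attempt, then 1s, now 2s, and it reaches 4s further down, so the delay appears to double after each failed MountVolume.SetUp attempt. An illustrative sketch of that schedule (a model of the observed numbers, not kubelet's actual implementation):

    package main

    import (
        "fmt"
        "time"
    )

    // Reproduces the retry delays visible in this log: 500ms, 1s, 2s, 4s.
    func main() {
        delay := 500 * time.Millisecond
        for attempt := 1; attempt <= 4; attempt++ {
            fmt.Printf("attempt %d: retry in %v\n", attempt, delay)
            delay *= 2 // delay doubles after each failure
        }
    }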
containerName="init" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.204231 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.211627 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.227342 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c89d5d749-9psdj"] Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.257437 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57d7d18e-e52d-417d-b62d-113550d69b79" path="/var/lib/kubelet/pods/57d7d18e-e52d-417d-b62d-113550d69b79/volumes" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.314213 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-ovsdbserver-sb\") pod \"dnsmasq-dns-6c89d5d749-9psdj\" (UID: \"59f266c6-b736-4db4-bd16-b5dbc7ee4f83\") " pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.314284 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqcqq\" (UniqueName: \"kubernetes.io/projected/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-kube-api-access-hqcqq\") pod \"dnsmasq-dns-6c89d5d749-9psdj\" (UID: \"59f266c6-b736-4db4-bd16-b5dbc7ee4f83\") " pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.314342 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-dns-svc\") pod \"dnsmasq-dns-6c89d5d749-9psdj\" (UID: \"59f266c6-b736-4db4-bd16-b5dbc7ee4f83\") " pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.314392 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-config\") pod \"dnsmasq-dns-6c89d5d749-9psdj\" (UID: \"59f266c6-b736-4db4-bd16-b5dbc7ee4f83\") " pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.346427 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-dnt84"] Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.359497 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-dnt84" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.364129 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.375900 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-dnt84"] Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.419935 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-ovsdbserver-sb\") pod \"dnsmasq-dns-6c89d5d749-9psdj\" (UID: \"59f266c6-b736-4db4-bd16-b5dbc7ee4f83\") " pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.419982 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqcqq\" (UniqueName: \"kubernetes.io/projected/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-kube-api-access-hqcqq\") pod \"dnsmasq-dns-6c89d5d749-9psdj\" (UID: \"59f266c6-b736-4db4-bd16-b5dbc7ee4f83\") " pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.420043 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/ee500515-c2eb-4f8e-b022-1d4f1bb8106e-ovs-rundir\") pod \"ovn-controller-metrics-dnt84\" (UID: \"ee500515-c2eb-4f8e-b022-1d4f1bb8106e\") " pod="openstack/ovn-controller-metrics-dnt84" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.420071 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee500515-c2eb-4f8e-b022-1d4f1bb8106e-combined-ca-bundle\") pod \"ovn-controller-metrics-dnt84\" (UID: \"ee500515-c2eb-4f8e-b022-1d4f1bb8106e\") " pod="openstack/ovn-controller-metrics-dnt84" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.422229 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee500515-c2eb-4f8e-b022-1d4f1bb8106e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-dnt84\" (UID: \"ee500515-c2eb-4f8e-b022-1d4f1bb8106e\") " pod="openstack/ovn-controller-metrics-dnt84" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.422427 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-dns-svc\") pod \"dnsmasq-dns-6c89d5d749-9psdj\" (UID: \"59f266c6-b736-4db4-bd16-b5dbc7ee4f83\") " pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.422461 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-config\") pod \"dnsmasq-dns-6c89d5d749-9psdj\" (UID: \"59f266c6-b736-4db4-bd16-b5dbc7ee4f83\") " pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.422594 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/ee500515-c2eb-4f8e-b022-1d4f1bb8106e-ovn-rundir\") pod \"ovn-controller-metrics-dnt84\" (UID: \"ee500515-c2eb-4f8e-b022-1d4f1bb8106e\") " 
pod="openstack/ovn-controller-metrics-dnt84" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.422641 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee500515-c2eb-4f8e-b022-1d4f1bb8106e-config\") pod \"ovn-controller-metrics-dnt84\" (UID: \"ee500515-c2eb-4f8e-b022-1d4f1bb8106e\") " pod="openstack/ovn-controller-metrics-dnt84" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.422681 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lp7l6\" (UniqueName: \"kubernetes.io/projected/ee500515-c2eb-4f8e-b022-1d4f1bb8106e-kube-api-access-lp7l6\") pod \"ovn-controller-metrics-dnt84\" (UID: \"ee500515-c2eb-4f8e-b022-1d4f1bb8106e\") " pod="openstack/ovn-controller-metrics-dnt84" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.423008 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-ovsdbserver-sb\") pod \"dnsmasq-dns-6c89d5d749-9psdj\" (UID: \"59f266c6-b736-4db4-bd16-b5dbc7ee4f83\") " pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.423214 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-config\") pod \"dnsmasq-dns-6c89d5d749-9psdj\" (UID: \"59f266c6-b736-4db4-bd16-b5dbc7ee4f83\") " pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.423316 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-dns-svc\") pod \"dnsmasq-dns-6c89d5d749-9psdj\" (UID: \"59f266c6-b736-4db4-bd16-b5dbc7ee4f83\") " pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.455975 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqcqq\" (UniqueName: \"kubernetes.io/projected/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-kube-api-access-hqcqq\") pod \"dnsmasq-dns-6c89d5d749-9psdj\" (UID: \"59f266c6-b736-4db4-bd16-b5dbc7ee4f83\") " pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.478042 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-n7wbr"] Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.523923 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/ee500515-c2eb-4f8e-b022-1d4f1bb8106e-ovn-rundir\") pod \"ovn-controller-metrics-dnt84\" (UID: \"ee500515-c2eb-4f8e-b022-1d4f1bb8106e\") " pod="openstack/ovn-controller-metrics-dnt84" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.524241 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee500515-c2eb-4f8e-b022-1d4f1bb8106e-config\") pod \"ovn-controller-metrics-dnt84\" (UID: \"ee500515-c2eb-4f8e-b022-1d4f1bb8106e\") " pod="openstack/ovn-controller-metrics-dnt84" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.524273 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lp7l6\" (UniqueName: 
\"kubernetes.io/projected/ee500515-c2eb-4f8e-b022-1d4f1bb8106e-kube-api-access-lp7l6\") pod \"ovn-controller-metrics-dnt84\" (UID: \"ee500515-c2eb-4f8e-b022-1d4f1bb8106e\") " pod="openstack/ovn-controller-metrics-dnt84" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.524345 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/ee500515-c2eb-4f8e-b022-1d4f1bb8106e-ovs-rundir\") pod \"ovn-controller-metrics-dnt84\" (UID: \"ee500515-c2eb-4f8e-b022-1d4f1bb8106e\") " pod="openstack/ovn-controller-metrics-dnt84" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.524389 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee500515-c2eb-4f8e-b022-1d4f1bb8106e-combined-ca-bundle\") pod \"ovn-controller-metrics-dnt84\" (UID: \"ee500515-c2eb-4f8e-b022-1d4f1bb8106e\") " pod="openstack/ovn-controller-metrics-dnt84" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.524416 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee500515-c2eb-4f8e-b022-1d4f1bb8106e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-dnt84\" (UID: \"ee500515-c2eb-4f8e-b022-1d4f1bb8106e\") " pod="openstack/ovn-controller-metrics-dnt84" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.524477 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-7lrtg"] Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.525803 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-7lrtg" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.524199 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/ee500515-c2eb-4f8e-b022-1d4f1bb8106e-ovn-rundir\") pod \"ovn-controller-metrics-dnt84\" (UID: \"ee500515-c2eb-4f8e-b022-1d4f1bb8106e\") " pod="openstack/ovn-controller-metrics-dnt84" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.527200 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee500515-c2eb-4f8e-b022-1d4f1bb8106e-config\") pod \"ovn-controller-metrics-dnt84\" (UID: \"ee500515-c2eb-4f8e-b022-1d4f1bb8106e\") " pod="openstack/ovn-controller-metrics-dnt84" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.527273 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/ee500515-c2eb-4f8e-b022-1d4f1bb8106e-ovs-rundir\") pod \"ovn-controller-metrics-dnt84\" (UID: \"ee500515-c2eb-4f8e-b022-1d4f1bb8106e\") " pod="openstack/ovn-controller-metrics-dnt84" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.529603 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.538632 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee500515-c2eb-4f8e-b022-1d4f1bb8106e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-dnt84\" (UID: \"ee500515-c2eb-4f8e-b022-1d4f1bb8106e\") " pod="openstack/ovn-controller-metrics-dnt84" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.538749 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee500515-c2eb-4f8e-b022-1d4f1bb8106e-combined-ca-bundle\") pod \"ovn-controller-metrics-dnt84\" (UID: \"ee500515-c2eb-4f8e-b022-1d4f1bb8106e\") " pod="openstack/ovn-controller-metrics-dnt84" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.556594 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-7lrtg"] Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.567469 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lp7l6\" (UniqueName: \"kubernetes.io/projected/ee500515-c2eb-4f8e-b022-1d4f1bb8106e-kube-api-access-lp7l6\") pod \"ovn-controller-metrics-dnt84\" (UID: \"ee500515-c2eb-4f8e-b022-1d4f1bb8106e\") " pod="openstack/ovn-controller-metrics-dnt84" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.570833 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.626470 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-config\") pod \"dnsmasq-dns-698758b865-7lrtg\" (UID: \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\") " pod="openstack/dnsmasq-dns-698758b865-7lrtg" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.626532 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-dns-svc\") pod \"dnsmasq-dns-698758b865-7lrtg\" (UID: \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\") " pod="openstack/dnsmasq-dns-698758b865-7lrtg" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.626571 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-7lrtg\" (UID: \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\") " pod="openstack/dnsmasq-dns-698758b865-7lrtg" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.626595 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-7lrtg\" (UID: \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\") " pod="openstack/dnsmasq-dns-698758b865-7lrtg" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.626675 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqc5k\" (UniqueName: \"kubernetes.io/projected/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-kube-api-access-xqc5k\") pod \"dnsmasq-dns-698758b865-7lrtg\" (UID: \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\") " pod="openstack/dnsmasq-dns-698758b865-7lrtg" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.683816 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.685058 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.691569 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.691810 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.692002 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-k85rk" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.692104 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.703614 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-dnt84" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.718747 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.730159 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqc5k\" (UniqueName: \"kubernetes.io/projected/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-kube-api-access-xqc5k\") pod \"dnsmasq-dns-698758b865-7lrtg\" (UID: \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\") " pod="openstack/dnsmasq-dns-698758b865-7lrtg" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.730296 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-config\") pod \"dnsmasq-dns-698758b865-7lrtg\" (UID: \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\") " pod="openstack/dnsmasq-dns-698758b865-7lrtg" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.730328 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-dns-svc\") pod \"dnsmasq-dns-698758b865-7lrtg\" (UID: \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\") " pod="openstack/dnsmasq-dns-698758b865-7lrtg" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.730386 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8acd1342-fa9f-43be-9c9f-28739a5aed78-etc-swift\") pod \"swift-storage-0\" (UID: \"8acd1342-fa9f-43be-9c9f-28739a5aed78\") " pod="openstack/swift-storage-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.730408 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-7lrtg\" (UID: \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\") " pod="openstack/dnsmasq-dns-698758b865-7lrtg" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.730447 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-7lrtg\" (UID: \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\") " pod="openstack/dnsmasq-dns-698758b865-7lrtg" Dec 04 17:48:50 crc kubenswrapper[4631]: E1204 17:48:50.731755 4631 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 04 17:48:50 crc 
kubenswrapper[4631]: E1204 17:48:50.731785 4631 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 04 17:48:50 crc kubenswrapper[4631]: E1204 17:48:50.731822 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8acd1342-fa9f-43be-9c9f-28739a5aed78-etc-swift podName:8acd1342-fa9f-43be-9c9f-28739a5aed78 nodeName:}" failed. No retries permitted until 2025-12-04 17:48:54.731808733 +0000 UTC m=+1264.764050721 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8acd1342-fa9f-43be-9c9f-28739a5aed78-etc-swift") pod "swift-storage-0" (UID: "8acd1342-fa9f-43be-9c9f-28739a5aed78") : configmap "swift-ring-files" not found Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.731941 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-dns-svc\") pod \"dnsmasq-dns-698758b865-7lrtg\" (UID: \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\") " pod="openstack/dnsmasq-dns-698758b865-7lrtg" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.733188 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-7lrtg\" (UID: \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\") " pod="openstack/dnsmasq-dns-698758b865-7lrtg" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.733198 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-config\") pod \"dnsmasq-dns-698758b865-7lrtg\" (UID: \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\") " pod="openstack/dnsmasq-dns-698758b865-7lrtg" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.733572 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-7lrtg\" (UID: \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\") " pod="openstack/dnsmasq-dns-698758b865-7lrtg" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.753868 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqc5k\" (UniqueName: \"kubernetes.io/projected/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-kube-api-access-xqc5k\") pod \"dnsmasq-dns-698758b865-7lrtg\" (UID: \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\") " pod="openstack/dnsmasq-dns-698758b865-7lrtg" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.832191 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a597650-5fec-493b-bda8-93bb60985ae5-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"5a597650-5fec-493b-bda8-93bb60985ae5\") " pod="openstack/ovn-northd-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.832283 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a597650-5fec-493b-bda8-93bb60985ae5-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"5a597650-5fec-493b-bda8-93bb60985ae5\") " pod="openstack/ovn-northd-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.832409 4631 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5a597650-5fec-493b-bda8-93bb60985ae5-scripts\") pod \"ovn-northd-0\" (UID: \"5a597650-5fec-493b-bda8-93bb60985ae5\") " pod="openstack/ovn-northd-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.832443 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a597650-5fec-493b-bda8-93bb60985ae5-config\") pod \"ovn-northd-0\" (UID: \"5a597650-5fec-493b-bda8-93bb60985ae5\") " pod="openstack/ovn-northd-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.832622 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5a597650-5fec-493b-bda8-93bb60985ae5-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"5a597650-5fec-493b-bda8-93bb60985ae5\") " pod="openstack/ovn-northd-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.832681 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a597650-5fec-493b-bda8-93bb60985ae5-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"5a597650-5fec-493b-bda8-93bb60985ae5\") " pod="openstack/ovn-northd-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.832697 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chhkl\" (UniqueName: \"kubernetes.io/projected/5a597650-5fec-493b-bda8-93bb60985ae5-kube-api-access-chhkl\") pod \"ovn-northd-0\" (UID: \"5a597650-5fec-493b-bda8-93bb60985ae5\") " pod="openstack/ovn-northd-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.911723 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-7lrtg" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.934716 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5a597650-5fec-493b-bda8-93bb60985ae5-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"5a597650-5fec-493b-bda8-93bb60985ae5\") " pod="openstack/ovn-northd-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.934776 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a597650-5fec-493b-bda8-93bb60985ae5-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"5a597650-5fec-493b-bda8-93bb60985ae5\") " pod="openstack/ovn-northd-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.934801 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chhkl\" (UniqueName: \"kubernetes.io/projected/5a597650-5fec-493b-bda8-93bb60985ae5-kube-api-access-chhkl\") pod \"ovn-northd-0\" (UID: \"5a597650-5fec-493b-bda8-93bb60985ae5\") " pod="openstack/ovn-northd-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.934837 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a597650-5fec-493b-bda8-93bb60985ae5-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"5a597650-5fec-493b-bda8-93bb60985ae5\") " pod="openstack/ovn-northd-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.934897 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a597650-5fec-493b-bda8-93bb60985ae5-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"5a597650-5fec-493b-bda8-93bb60985ae5\") " pod="openstack/ovn-northd-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.934953 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5a597650-5fec-493b-bda8-93bb60985ae5-scripts\") pod \"ovn-northd-0\" (UID: \"5a597650-5fec-493b-bda8-93bb60985ae5\") " pod="openstack/ovn-northd-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.934986 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a597650-5fec-493b-bda8-93bb60985ae5-config\") pod \"ovn-northd-0\" (UID: \"5a597650-5fec-493b-bda8-93bb60985ae5\") " pod="openstack/ovn-northd-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.935292 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5a597650-5fec-493b-bda8-93bb60985ae5-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"5a597650-5fec-493b-bda8-93bb60985ae5\") " pod="openstack/ovn-northd-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.935850 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a597650-5fec-493b-bda8-93bb60985ae5-config\") pod \"ovn-northd-0\" (UID: \"5a597650-5fec-493b-bda8-93bb60985ae5\") " pod="openstack/ovn-northd-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.936509 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5a597650-5fec-493b-bda8-93bb60985ae5-scripts\") pod \"ovn-northd-0\" (UID: \"5a597650-5fec-493b-bda8-93bb60985ae5\") " 
pod="openstack/ovn-northd-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.938784 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a597650-5fec-493b-bda8-93bb60985ae5-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"5a597650-5fec-493b-bda8-93bb60985ae5\") " pod="openstack/ovn-northd-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.941986 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a597650-5fec-493b-bda8-93bb60985ae5-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"5a597650-5fec-493b-bda8-93bb60985ae5\") " pod="openstack/ovn-northd-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.942345 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a597650-5fec-493b-bda8-93bb60985ae5-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"5a597650-5fec-493b-bda8-93bb60985ae5\") " pod="openstack/ovn-northd-0" Dec 04 17:48:50 crc kubenswrapper[4631]: I1204 17:48:50.952518 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chhkl\" (UniqueName: \"kubernetes.io/projected/5a597650-5fec-493b-bda8-93bb60985ae5-kube-api-access-chhkl\") pod \"ovn-northd-0\" (UID: \"5a597650-5fec-493b-bda8-93bb60985ae5\") " pod="openstack/ovn-northd-0" Dec 04 17:48:51 crc kubenswrapper[4631]: I1204 17:48:51.007647 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 04 17:48:51 crc kubenswrapper[4631]: I1204 17:48:51.249073 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 04 17:48:51 crc kubenswrapper[4631]: I1204 17:48:51.249123 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 04 17:48:51 crc kubenswrapper[4631]: I1204 17:48:51.251022 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-hjd8c" Dec 04 17:48:51 crc kubenswrapper[4631]: I1204 17:48:51.340153 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3425ef2-08d1-4d79-b36e-03852e0e0750-config\") pod \"a3425ef2-08d1-4d79-b36e-03852e0e0750\" (UID: \"a3425ef2-08d1-4d79-b36e-03852e0e0750\") " Dec 04 17:48:51 crc kubenswrapper[4631]: I1204 17:48:51.340497 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-spfvb\" (UniqueName: \"kubernetes.io/projected/a3425ef2-08d1-4d79-b36e-03852e0e0750-kube-api-access-spfvb\") pod \"a3425ef2-08d1-4d79-b36e-03852e0e0750\" (UID: \"a3425ef2-08d1-4d79-b36e-03852e0e0750\") " Dec 04 17:48:51 crc kubenswrapper[4631]: I1204 17:48:51.340577 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3425ef2-08d1-4d79-b36e-03852e0e0750-dns-svc\") pod \"a3425ef2-08d1-4d79-b36e-03852e0e0750\" (UID: \"a3425ef2-08d1-4d79-b36e-03852e0e0750\") " Dec 04 17:48:51 crc kubenswrapper[4631]: I1204 17:48:51.344589 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3425ef2-08d1-4d79-b36e-03852e0e0750-kube-api-access-spfvb" (OuterVolumeSpecName: "kube-api-access-spfvb") pod "a3425ef2-08d1-4d79-b36e-03852e0e0750" (UID: "a3425ef2-08d1-4d79-b36e-03852e0e0750"). InnerVolumeSpecName "kube-api-access-spfvb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:48:51 crc kubenswrapper[4631]: I1204 17:48:51.423591 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3425ef2-08d1-4d79-b36e-03852e0e0750-config" (OuterVolumeSpecName: "config") pod "a3425ef2-08d1-4d79-b36e-03852e0e0750" (UID: "a3425ef2-08d1-4d79-b36e-03852e0e0750"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:48:51 crc kubenswrapper[4631]: I1204 17:48:51.432683 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3425ef2-08d1-4d79-b36e-03852e0e0750-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a3425ef2-08d1-4d79-b36e-03852e0e0750" (UID: "a3425ef2-08d1-4d79-b36e-03852e0e0750"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:48:51 crc kubenswrapper[4631]: I1204 17:48:51.447041 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3425ef2-08d1-4d79-b36e-03852e0e0750-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:48:51 crc kubenswrapper[4631]: I1204 17:48:51.447067 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-spfvb\" (UniqueName: \"kubernetes.io/projected/a3425ef2-08d1-4d79-b36e-03852e0e0750-kube-api-access-spfvb\") on node \"crc\" DevicePath \"\"" Dec 04 17:48:51 crc kubenswrapper[4631]: I1204 17:48:51.447078 4631 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3425ef2-08d1-4d79-b36e-03852e0e0750-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 17:48:51 crc kubenswrapper[4631]: I1204 17:48:51.673212 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-hjd8c" event={"ID":"a3425ef2-08d1-4d79-b36e-03852e0e0750","Type":"ContainerDied","Data":"f01b9cc2082b3750211068ca70ac93331696bf4ad481f1cfded99982444f977a"} Dec 04 17:48:51 crc kubenswrapper[4631]: I1204 17:48:51.673277 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-hjd8c" Dec 04 17:48:51 crc kubenswrapper[4631]: I1204 17:48:51.673309 4631 scope.go:117] "RemoveContainer" containerID="19a53491dd5c779d9b09fdfd9d4d943d1f632b53042f7342d1b5d4a0d3a71801" Dec 04 17:48:51 crc kubenswrapper[4631]: I1204 17:48:51.674085 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7cb5889db5-n7wbr" podUID="a0a74f92-98dc-437d-8ab1-784e90727a5d" containerName="dnsmasq-dns" containerID="cri-o://bc2f5f110cac5a4388df8e54bc990de802b5fbb18160d2f8fe3a8f86cd6d9d4c" gracePeriod=10 Dec 04 17:48:51 crc kubenswrapper[4631]: I1204 17:48:51.730876 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-hjd8c"] Dec 04 17:48:51 crc kubenswrapper[4631]: I1204 17:48:51.737177 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-hjd8c"] Dec 04 17:48:51 crc kubenswrapper[4631]: I1204 17:48:51.765701 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-dnt84"] Dec 04 17:48:51 crc kubenswrapper[4631]: I1204 17:48:51.778691 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c89d5d749-9psdj"] Dec 04 17:48:52 crc kubenswrapper[4631]: I1204 17:48:52.261096 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3425ef2-08d1-4d79-b36e-03852e0e0750" path="/var/lib/kubelet/pods/a3425ef2-08d1-4d79-b36e-03852e0e0750/volumes" Dec 04 17:48:52 crc kubenswrapper[4631]: I1204 17:48:52.435123 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-7lrtg"] Dec 04 17:48:52 crc kubenswrapper[4631]: I1204 17:48:52.553536 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:52 crc kubenswrapper[4631]: I1204 17:48:52.553578 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:52 crc kubenswrapper[4631]: I1204 17:48:52.565084 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 04 17:48:52 crc kubenswrapper[4631]: I1204 17:48:52.685746 4631 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/ovn-controller-metrics-dnt84" event={"ID":"ee500515-c2eb-4f8e-b022-1d4f1bb8106e","Type":"ContainerStarted","Data":"e08355ea6e305f13b2dccc995dba924eb036b96ecb6e08edfe5533cf8342e21a"} Dec 04 17:48:52 crc kubenswrapper[4631]: I1204 17:48:52.687305 4631 generic.go:334] "Generic (PLEG): container finished" podID="59f266c6-b736-4db4-bd16-b5dbc7ee4f83" containerID="238ee34c80a7fa7016342cc3166286a1ba8f8a9c5e5fa31eeaa0b37d5208784d" exitCode=0 Dec 04 17:48:52 crc kubenswrapper[4631]: I1204 17:48:52.687402 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" event={"ID":"59f266c6-b736-4db4-bd16-b5dbc7ee4f83","Type":"ContainerDied","Data":"238ee34c80a7fa7016342cc3166286a1ba8f8a9c5e5fa31eeaa0b37d5208784d"} Dec 04 17:48:52 crc kubenswrapper[4631]: I1204 17:48:52.687426 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" event={"ID":"59f266c6-b736-4db4-bd16-b5dbc7ee4f83","Type":"ContainerStarted","Data":"d06f42837fc5828f33d817195aac0e2ad414eaa777c8df04036ebe03463e04ab"} Dec 04 17:48:52 crc kubenswrapper[4631]: I1204 17:48:52.695009 4631 generic.go:334] "Generic (PLEG): container finished" podID="a0a74f92-98dc-437d-8ab1-784e90727a5d" containerID="bc2f5f110cac5a4388df8e54bc990de802b5fbb18160d2f8fe3a8f86cd6d9d4c" exitCode=0 Dec 04 17:48:52 crc kubenswrapper[4631]: I1204 17:48:52.695047 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-n7wbr" event={"ID":"a0a74f92-98dc-437d-8ab1-784e90727a5d","Type":"ContainerDied","Data":"bc2f5f110cac5a4388df8e54bc990de802b5fbb18160d2f8fe3a8f86cd6d9d4c"} Dec 04 17:48:53 crc kubenswrapper[4631]: W1204 17:48:53.434339 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5b28b17b_31f2_49c7_b089_dfd0275bc8ba.slice/crio-06d3b98ecbb7e4333d8f78a911719ed995a8013f5eb8c0426a257c0a430f362d WatchSource:0}: Error finding container 06d3b98ecbb7e4333d8f78a911719ed995a8013f5eb8c0426a257c0a430f362d: Status 404 returned error can't find the container with id 06d3b98ecbb7e4333d8f78a911719ed995a8013f5eb8c0426a257c0a430f362d Dec 04 17:48:53 crc kubenswrapper[4631]: I1204 17:48:53.533518 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-n7wbr" Dec 04 17:48:53 crc kubenswrapper[4631]: I1204 17:48:53.604189 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99gvq\" (UniqueName: \"kubernetes.io/projected/a0a74f92-98dc-437d-8ab1-784e90727a5d-kube-api-access-99gvq\") pod \"a0a74f92-98dc-437d-8ab1-784e90727a5d\" (UID: \"a0a74f92-98dc-437d-8ab1-784e90727a5d\") " Dec 04 17:48:53 crc kubenswrapper[4631]: I1204 17:48:53.604379 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0a74f92-98dc-437d-8ab1-784e90727a5d-config\") pod \"a0a74f92-98dc-437d-8ab1-784e90727a5d\" (UID: \"a0a74f92-98dc-437d-8ab1-784e90727a5d\") " Dec 04 17:48:53 crc kubenswrapper[4631]: I1204 17:48:53.604427 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0a74f92-98dc-437d-8ab1-784e90727a5d-dns-svc\") pod \"a0a74f92-98dc-437d-8ab1-784e90727a5d\" (UID: \"a0a74f92-98dc-437d-8ab1-784e90727a5d\") " Dec 04 17:48:53 crc kubenswrapper[4631]: I1204 17:48:53.616090 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0a74f92-98dc-437d-8ab1-784e90727a5d-kube-api-access-99gvq" (OuterVolumeSpecName: "kube-api-access-99gvq") pod "a0a74f92-98dc-437d-8ab1-784e90727a5d" (UID: "a0a74f92-98dc-437d-8ab1-784e90727a5d"). InnerVolumeSpecName "kube-api-access-99gvq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:48:53 crc kubenswrapper[4631]: I1204 17:48:53.706958 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99gvq\" (UniqueName: \"kubernetes.io/projected/a0a74f92-98dc-437d-8ab1-784e90727a5d-kube-api-access-99gvq\") on node \"crc\" DevicePath \"\"" Dec 04 17:48:53 crc kubenswrapper[4631]: I1204 17:48:53.708279 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-n7wbr" event={"ID":"a0a74f92-98dc-437d-8ab1-784e90727a5d","Type":"ContainerDied","Data":"73d30f8eff462021f139d8b625569c891e360719002ad9d29de8be85cdd20c8a"} Dec 04 17:48:53 crc kubenswrapper[4631]: I1204 17:48:53.708315 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-n7wbr" Dec 04 17:48:53 crc kubenswrapper[4631]: I1204 17:48:53.708319 4631 scope.go:117] "RemoveContainer" containerID="bc2f5f110cac5a4388df8e54bc990de802b5fbb18160d2f8fe3a8f86cd6d9d4c" Dec 04 17:48:53 crc kubenswrapper[4631]: I1204 17:48:53.712267 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-7lrtg" event={"ID":"5b28b17b-31f2-49c7-b089-dfd0275bc8ba","Type":"ContainerStarted","Data":"06d3b98ecbb7e4333d8f78a911719ed995a8013f5eb8c0426a257c0a430f362d"} Dec 04 17:48:53 crc kubenswrapper[4631]: I1204 17:48:53.755041 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0a74f92-98dc-437d-8ab1-784e90727a5d-config" (OuterVolumeSpecName: "config") pod "a0a74f92-98dc-437d-8ab1-784e90727a5d" (UID: "a0a74f92-98dc-437d-8ab1-784e90727a5d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:48:53 crc kubenswrapper[4631]: I1204 17:48:53.761924 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0a74f92-98dc-437d-8ab1-784e90727a5d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a0a74f92-98dc-437d-8ab1-784e90727a5d" (UID: "a0a74f92-98dc-437d-8ab1-784e90727a5d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:48:53 crc kubenswrapper[4631]: I1204 17:48:53.808698 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0a74f92-98dc-437d-8ab1-784e90727a5d-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:48:53 crc kubenswrapper[4631]: I1204 17:48:53.808738 4631 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0a74f92-98dc-437d-8ab1-784e90727a5d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 17:48:54 crc kubenswrapper[4631]: I1204 17:48:54.041136 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-n7wbr"] Dec 04 17:48:54 crc kubenswrapper[4631]: I1204 17:48:54.049133 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-n7wbr"] Dec 04 17:48:54 crc kubenswrapper[4631]: I1204 17:48:54.273250 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0a74f92-98dc-437d-8ab1-784e90727a5d" path="/var/lib/kubelet/pods/a0a74f92-98dc-437d-8ab1-784e90727a5d/volumes" Dec 04 17:48:54 crc kubenswrapper[4631]: I1204 17:48:54.722072 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-dnt84" event={"ID":"ee500515-c2eb-4f8e-b022-1d4f1bb8106e","Type":"ContainerStarted","Data":"f1ad360b6a010d30dfbfeb6b51ee46f3528a939c40219511a6845a524cc79e58"} Dec 04 17:48:54 crc kubenswrapper[4631]: I1204 17:48:54.739981 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-dnt84" podStartSLOduration=4.73996077 podStartE2EDuration="4.73996077s" podCreationTimestamp="2025-12-04 17:48:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:48:54.735308707 +0000 UTC m=+1264.767550705" watchObservedRunningTime="2025-12-04 17:48:54.73996077 +0000 UTC m=+1264.772202768" Dec 04 17:48:54 crc kubenswrapper[4631]: I1204 17:48:54.822472 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8acd1342-fa9f-43be-9c9f-28739a5aed78-etc-swift\") pod \"swift-storage-0\" (UID: \"8acd1342-fa9f-43be-9c9f-28739a5aed78\") " pod="openstack/swift-storage-0" Dec 04 17:48:54 crc kubenswrapper[4631]: E1204 17:48:54.822710 4631 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 04 17:48:54 crc kubenswrapper[4631]: E1204 17:48:54.822903 4631 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 04 17:48:54 crc kubenswrapper[4631]: E1204 17:48:54.822970 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8acd1342-fa9f-43be-9c9f-28739a5aed78-etc-swift podName:8acd1342-fa9f-43be-9c9f-28739a5aed78 nodeName:}" failed. 
No retries permitted until 2025-12-04 17:49:02.822948133 +0000 UTC m=+1272.855190161 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8acd1342-fa9f-43be-9c9f-28739a5aed78-etc-swift") pod "swift-storage-0" (UID: "8acd1342-fa9f-43be-9c9f-28739a5aed78") : configmap "swift-ring-files" not found Dec 04 17:48:55 crc kubenswrapper[4631]: I1204 17:48:55.016565 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:55 crc kubenswrapper[4631]: I1204 17:48:55.097086 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 04 17:48:55 crc kubenswrapper[4631]: I1204 17:48:55.147821 4631 scope.go:117] "RemoveContainer" containerID="e7095a23c6c6bc6ffab10a8950ad4abbfb1514fe35d2c9c11df22cdee5c02d33" Dec 04 17:48:55 crc kubenswrapper[4631]: I1204 17:48:55.729503 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-xjxf8" event={"ID":"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f","Type":"ContainerStarted","Data":"3ecd8835482006425ba52f6c5ad4068c69067c6eb762afcdb4fd849834f83639"} Dec 04 17:48:55 crc kubenswrapper[4631]: I1204 17:48:55.732521 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" event={"ID":"59f266c6-b736-4db4-bd16-b5dbc7ee4f83","Type":"ContainerStarted","Data":"0edbc4982a1f6f49e7169976edd90c52f9d6c00c64e4f4d29c2e38a599a6c401"} Dec 04 17:48:55 crc kubenswrapper[4631]: I1204 17:48:55.732889 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" Dec 04 17:48:55 crc kubenswrapper[4631]: I1204 17:48:55.736238 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"5a597650-5fec-493b-bda8-93bb60985ae5","Type":"ContainerStarted","Data":"d8c00411dcddbdaf11532e4cb7c19f8a70c92ca4c51ab5e29b5d2c8453c0a553"} Dec 04 17:48:55 crc kubenswrapper[4631]: I1204 17:48:55.738415 4631 generic.go:334] "Generic (PLEG): container finished" podID="5b28b17b-31f2-49c7-b089-dfd0275bc8ba" containerID="fc250b501ffbf552b7a5b933df5170dae19ce0b17a4f0ec1a3a20b8720f6c07e" exitCode=0 Dec 04 17:48:55 crc kubenswrapper[4631]: I1204 17:48:55.739193 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-7lrtg" event={"ID":"5b28b17b-31f2-49c7-b089-dfd0275bc8ba","Type":"ContainerDied","Data":"fc250b501ffbf552b7a5b933df5170dae19ce0b17a4f0ec1a3a20b8720f6c07e"} Dec 04 17:48:55 crc kubenswrapper[4631]: I1204 17:48:55.753665 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-xjxf8" podStartSLOduration=1.788521397 podStartE2EDuration="8.753640267s" podCreationTimestamp="2025-12-04 17:48:47 +0000 UTC" firstStartedPulling="2025-12-04 17:48:48.307561357 +0000 UTC m=+1258.339803355" lastFinishedPulling="2025-12-04 17:48:55.272680227 +0000 UTC m=+1265.304922225" observedRunningTime="2025-12-04 17:48:55.74713072 +0000 UTC m=+1265.779372718" watchObservedRunningTime="2025-12-04 17:48:55.753640267 +0000 UTC m=+1265.785882275" Dec 04 17:48:55 crc kubenswrapper[4631]: I1204 17:48:55.845784 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" podStartSLOduration=5.845756912 podStartE2EDuration="5.845756912s" podCreationTimestamp="2025-12-04 17:48:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:48:55.842742645 +0000 UTC m=+1265.874984643" watchObservedRunningTime="2025-12-04 17:48:55.845756912 +0000 UTC m=+1265.877998910" Dec 04 17:48:56 crc kubenswrapper[4631]: I1204 17:48:56.015338 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 04 17:48:56 crc kubenswrapper[4631]: I1204 17:48:56.091103 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 04 17:48:56 crc kubenswrapper[4631]: I1204 17:48:56.747451 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"5a597650-5fec-493b-bda8-93bb60985ae5","Type":"ContainerStarted","Data":"49527ee27cf5437f612b9c10feac4cb8f141a18c07d9092a132fbbfba927fdf4"} Dec 04 17:48:56 crc kubenswrapper[4631]: I1204 17:48:56.754103 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-7lrtg" event={"ID":"5b28b17b-31f2-49c7-b089-dfd0275bc8ba","Type":"ContainerStarted","Data":"65fd63a43a64bc28cba0b6c37c0567b947410004d363d14ef4a6cfb7256d04f6"} Dec 04 17:48:56 crc kubenswrapper[4631]: I1204 17:48:56.754607 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-7lrtg" Dec 04 17:48:56 crc kubenswrapper[4631]: I1204 17:48:56.782627 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-7lrtg" podStartSLOduration=6.782611982 podStartE2EDuration="6.782611982s" podCreationTimestamp="2025-12-04 17:48:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:48:56.777973369 +0000 UTC m=+1266.810215377" watchObservedRunningTime="2025-12-04 17:48:56.782611982 +0000 UTC m=+1266.814853980" Dec 04 17:48:57 crc kubenswrapper[4631]: I1204 17:48:57.765233 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"5a597650-5fec-493b-bda8-93bb60985ae5","Type":"ContainerStarted","Data":"48e84ce8fefb1426d534e140534273e3458a66ce4d160386d9f54d10d6abba33"} Dec 04 17:48:57 crc kubenswrapper[4631]: I1204 17:48:57.766035 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 04 17:48:57 crc kubenswrapper[4631]: I1204 17:48:57.793328 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=6.529009481 podStartE2EDuration="7.793314173s" podCreationTimestamp="2025-12-04 17:48:50 +0000 UTC" firstStartedPulling="2025-12-04 17:48:55.161900626 +0000 UTC m=+1265.194142624" lastFinishedPulling="2025-12-04 17:48:56.426205318 +0000 UTC m=+1266.458447316" observedRunningTime="2025-12-04 17:48:57.789160933 +0000 UTC m=+1267.821402981" watchObservedRunningTime="2025-12-04 17:48:57.793314173 +0000 UTC m=+1267.825556171" Dec 04 17:49:00 crc kubenswrapper[4631]: I1204 17:49:00.572554 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" Dec 04 17:49:00 crc kubenswrapper[4631]: I1204 17:49:00.916554 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-7lrtg" Dec 04 17:49:00 crc kubenswrapper[4631]: I1204 17:49:00.971833 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c89d5d749-9psdj"] Dec 04 17:49:00 crc 
kubenswrapper[4631]: I1204 17:49:00.972133 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" podUID="59f266c6-b736-4db4-bd16-b5dbc7ee4f83" containerName="dnsmasq-dns" containerID="cri-o://0edbc4982a1f6f49e7169976edd90c52f9d6c00c64e4f4d29c2e38a599a6c401" gracePeriod=10 Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.457820 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.652904 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-config\") pod \"59f266c6-b736-4db4-bd16-b5dbc7ee4f83\" (UID: \"59f266c6-b736-4db4-bd16-b5dbc7ee4f83\") " Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.652950 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqcqq\" (UniqueName: \"kubernetes.io/projected/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-kube-api-access-hqcqq\") pod \"59f266c6-b736-4db4-bd16-b5dbc7ee4f83\" (UID: \"59f266c6-b736-4db4-bd16-b5dbc7ee4f83\") " Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.653008 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-ovsdbserver-sb\") pod \"59f266c6-b736-4db4-bd16-b5dbc7ee4f83\" (UID: \"59f266c6-b736-4db4-bd16-b5dbc7ee4f83\") " Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.653033 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-dns-svc\") pod \"59f266c6-b736-4db4-bd16-b5dbc7ee4f83\" (UID: \"59f266c6-b736-4db4-bd16-b5dbc7ee4f83\") " Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.672616 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-kube-api-access-hqcqq" (OuterVolumeSpecName: "kube-api-access-hqcqq") pod "59f266c6-b736-4db4-bd16-b5dbc7ee4f83" (UID: "59f266c6-b736-4db4-bd16-b5dbc7ee4f83"). InnerVolumeSpecName "kube-api-access-hqcqq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.707043 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "59f266c6-b736-4db4-bd16-b5dbc7ee4f83" (UID: "59f266c6-b736-4db4-bd16-b5dbc7ee4f83"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.713313 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "59f266c6-b736-4db4-bd16-b5dbc7ee4f83" (UID: "59f266c6-b736-4db4-bd16-b5dbc7ee4f83"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.718849 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-config" (OuterVolumeSpecName: "config") pod "59f266c6-b736-4db4-bd16-b5dbc7ee4f83" (UID: "59f266c6-b736-4db4-bd16-b5dbc7ee4f83"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.754635 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.755494 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqcqq\" (UniqueName: \"kubernetes.io/projected/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-kube-api-access-hqcqq\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.755608 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.755729 4631 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/59f266c6-b736-4db4-bd16-b5dbc7ee4f83-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.790766 4631 generic.go:334] "Generic (PLEG): container finished" podID="59f266c6-b736-4db4-bd16-b5dbc7ee4f83" containerID="0edbc4982a1f6f49e7169976edd90c52f9d6c00c64e4f4d29c2e38a599a6c401" exitCode=0 Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.791003 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" event={"ID":"59f266c6-b736-4db4-bd16-b5dbc7ee4f83","Type":"ContainerDied","Data":"0edbc4982a1f6f49e7169976edd90c52f9d6c00c64e4f4d29c2e38a599a6c401"} Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.791098 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" event={"ID":"59f266c6-b736-4db4-bd16-b5dbc7ee4f83","Type":"ContainerDied","Data":"d06f42837fc5828f33d817195aac0e2ad414eaa777c8df04036ebe03463e04ab"} Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.791192 4631 scope.go:117] "RemoveContainer" containerID="0edbc4982a1f6f49e7169976edd90c52f9d6c00c64e4f4d29c2e38a599a6c401" Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.791398 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c89d5d749-9psdj" Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.810352 4631 scope.go:117] "RemoveContainer" containerID="238ee34c80a7fa7016342cc3166286a1ba8f8a9c5e5fa31eeaa0b37d5208784d" Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.845133 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c89d5d749-9psdj"] Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.849230 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c89d5d749-9psdj"] Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.852248 4631 scope.go:117] "RemoveContainer" containerID="0edbc4982a1f6f49e7169976edd90c52f9d6c00c64e4f4d29c2e38a599a6c401" Dec 04 17:49:01 crc kubenswrapper[4631]: E1204 17:49:01.852750 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0edbc4982a1f6f49e7169976edd90c52f9d6c00c64e4f4d29c2e38a599a6c401\": container with ID starting with 0edbc4982a1f6f49e7169976edd90c52f9d6c00c64e4f4d29c2e38a599a6c401 not found: ID does not exist" containerID="0edbc4982a1f6f49e7169976edd90c52f9d6c00c64e4f4d29c2e38a599a6c401" Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.852791 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0edbc4982a1f6f49e7169976edd90c52f9d6c00c64e4f4d29c2e38a599a6c401"} err="failed to get container status \"0edbc4982a1f6f49e7169976edd90c52f9d6c00c64e4f4d29c2e38a599a6c401\": rpc error: code = NotFound desc = could not find container \"0edbc4982a1f6f49e7169976edd90c52f9d6c00c64e4f4d29c2e38a599a6c401\": container with ID starting with 0edbc4982a1f6f49e7169976edd90c52f9d6c00c64e4f4d29c2e38a599a6c401 not found: ID does not exist" Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.852815 4631 scope.go:117] "RemoveContainer" containerID="238ee34c80a7fa7016342cc3166286a1ba8f8a9c5e5fa31eeaa0b37d5208784d" Dec 04 17:49:01 crc kubenswrapper[4631]: E1204 17:49:01.853065 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"238ee34c80a7fa7016342cc3166286a1ba8f8a9c5e5fa31eeaa0b37d5208784d\": container with ID starting with 238ee34c80a7fa7016342cc3166286a1ba8f8a9c5e5fa31eeaa0b37d5208784d not found: ID does not exist" containerID="238ee34c80a7fa7016342cc3166286a1ba8f8a9c5e5fa31eeaa0b37d5208784d" Dec 04 17:49:01 crc kubenswrapper[4631]: I1204 17:49:01.853094 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"238ee34c80a7fa7016342cc3166286a1ba8f8a9c5e5fa31eeaa0b37d5208784d"} err="failed to get container status \"238ee34c80a7fa7016342cc3166286a1ba8f8a9c5e5fa31eeaa0b37d5208784d\": rpc error: code = NotFound desc = could not find container \"238ee34c80a7fa7016342cc3166286a1ba8f8a9c5e5fa31eeaa0b37d5208784d\": container with ID starting with 238ee34c80a7fa7016342cc3166286a1ba8f8a9c5e5fa31eeaa0b37d5208784d not found: ID does not exist" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.248099 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59f266c6-b736-4db4-bd16-b5dbc7ee4f83" path="/var/lib/kubelet/pods/59f266c6-b736-4db4-bd16-b5dbc7ee4f83/volumes" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.735592 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-4214-account-create-update-wph5g"] Dec 04 17:49:02 crc kubenswrapper[4631]: E1204 17:49:02.736229 4631 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0a74f92-98dc-437d-8ab1-784e90727a5d" containerName="init" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.736241 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0a74f92-98dc-437d-8ab1-784e90727a5d" containerName="init" Dec 04 17:49:02 crc kubenswrapper[4631]: E1204 17:49:02.736260 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59f266c6-b736-4db4-bd16-b5dbc7ee4f83" containerName="dnsmasq-dns" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.736266 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="59f266c6-b736-4db4-bd16-b5dbc7ee4f83" containerName="dnsmasq-dns" Dec 04 17:49:02 crc kubenswrapper[4631]: E1204 17:49:02.736284 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3425ef2-08d1-4d79-b36e-03852e0e0750" containerName="init" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.736290 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3425ef2-08d1-4d79-b36e-03852e0e0750" containerName="init" Dec 04 17:49:02 crc kubenswrapper[4631]: E1204 17:49:02.736298 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59f266c6-b736-4db4-bd16-b5dbc7ee4f83" containerName="init" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.736304 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="59f266c6-b736-4db4-bd16-b5dbc7ee4f83" containerName="init" Dec 04 17:49:02 crc kubenswrapper[4631]: E1204 17:49:02.736317 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0a74f92-98dc-437d-8ab1-784e90727a5d" containerName="dnsmasq-dns" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.736322 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0a74f92-98dc-437d-8ab1-784e90727a5d" containerName="dnsmasq-dns" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.736507 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="59f266c6-b736-4db4-bd16-b5dbc7ee4f83" containerName="dnsmasq-dns" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.736521 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3425ef2-08d1-4d79-b36e-03852e0e0750" containerName="init" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.736533 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0a74f92-98dc-437d-8ab1-784e90727a5d" containerName="dnsmasq-dns" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.737079 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-4214-account-create-update-wph5g" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.739830 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.752715 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-4214-account-create-update-wph5g"] Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.770574 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cmjx\" (UniqueName: \"kubernetes.io/projected/5a142a3c-8ece-4f7e-a43d-778c9ad25a32-kube-api-access-6cmjx\") pod \"keystone-4214-account-create-update-wph5g\" (UID: \"5a142a3c-8ece-4f7e-a43d-778c9ad25a32\") " pod="openstack/keystone-4214-account-create-update-wph5g" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.770969 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a142a3c-8ece-4f7e-a43d-778c9ad25a32-operator-scripts\") pod \"keystone-4214-account-create-update-wph5g\" (UID: \"5a142a3c-8ece-4f7e-a43d-778c9ad25a32\") " pod="openstack/keystone-4214-account-create-update-wph5g" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.797620 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-j97nj"] Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.799278 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-j97nj" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.803957 4631 generic.go:334] "Generic (PLEG): container finished" podID="a71b38c4-ee61-49f2-8c8c-5adc05df2159" containerID="f3df45d8c13bc0c4ea4338ca7db675f52fb4e8786e9eaea1853623a900588c8c" exitCode=0 Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.804003 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a71b38c4-ee61-49f2-8c8c-5adc05df2159","Type":"ContainerDied","Data":"f3df45d8c13bc0c4ea4338ca7db675f52fb4e8786e9eaea1853623a900588c8c"} Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.809356 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-xjxf8" event={"ID":"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f","Type":"ContainerDied","Data":"3ecd8835482006425ba52f6c5ad4068c69067c6eb762afcdb4fd849834f83639"} Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.820370 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-j97nj"] Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.809275 4631 generic.go:334] "Generic (PLEG): container finished" podID="23cc29b2-48d6-42f1-a2ff-fbd418d0b47f" containerID="3ecd8835482006425ba52f6c5ad4068c69067c6eb762afcdb4fd849834f83639" exitCode=0 Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.823551 4631 generic.go:334] "Generic (PLEG): container finished" podID="2ad48d12-6e35-428b-ac2e-ee6c2cf668ef" containerID="e833e3040ea35a329a540f414bd27255b574d3b651ea3a7efba64fa3fed817a2" exitCode=0 Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.823620 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef","Type":"ContainerDied","Data":"e833e3040ea35a329a540f414bd27255b574d3b651ea3a7efba64fa3fed817a2"} Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.871703 
4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a142a3c-8ece-4f7e-a43d-778c9ad25a32-operator-scripts\") pod \"keystone-4214-account-create-update-wph5g\" (UID: \"5a142a3c-8ece-4f7e-a43d-778c9ad25a32\") " pod="openstack/keystone-4214-account-create-update-wph5g" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.871774 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8acd1342-fa9f-43be-9c9f-28739a5aed78-etc-swift\") pod \"swift-storage-0\" (UID: \"8acd1342-fa9f-43be-9c9f-28739a5aed78\") " pod="openstack/swift-storage-0" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.871795 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cmjx\" (UniqueName: \"kubernetes.io/projected/5a142a3c-8ece-4f7e-a43d-778c9ad25a32-kube-api-access-6cmjx\") pod \"keystone-4214-account-create-update-wph5g\" (UID: \"5a142a3c-8ece-4f7e-a43d-778c9ad25a32\") " pod="openstack/keystone-4214-account-create-update-wph5g" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.872570 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a142a3c-8ece-4f7e-a43d-778c9ad25a32-operator-scripts\") pod \"keystone-4214-account-create-update-wph5g\" (UID: \"5a142a3c-8ece-4f7e-a43d-778c9ad25a32\") " pod="openstack/keystone-4214-account-create-update-wph5g" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.879205 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8acd1342-fa9f-43be-9c9f-28739a5aed78-etc-swift\") pod \"swift-storage-0\" (UID: \"8acd1342-fa9f-43be-9c9f-28739a5aed78\") " pod="openstack/swift-storage-0" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.897844 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cmjx\" (UniqueName: \"kubernetes.io/projected/5a142a3c-8ece-4f7e-a43d-778c9ad25a32-kube-api-access-6cmjx\") pod \"keystone-4214-account-create-update-wph5g\" (UID: \"5a142a3c-8ece-4f7e-a43d-778c9ad25a32\") " pod="openstack/keystone-4214-account-create-update-wph5g" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.973019 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7hqt\" (UniqueName: \"kubernetes.io/projected/fab6d82a-4302-4582-ab50-411aae70084a-kube-api-access-j7hqt\") pod \"keystone-db-create-j97nj\" (UID: \"fab6d82a-4302-4582-ab50-411aae70084a\") " pod="openstack/keystone-db-create-j97nj" Dec 04 17:49:02 crc kubenswrapper[4631]: I1204 17:49:02.973128 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fab6d82a-4302-4582-ab50-411aae70084a-operator-scripts\") pod \"keystone-db-create-j97nj\" (UID: \"fab6d82a-4302-4582-ab50-411aae70084a\") " pod="openstack/keystone-db-create-j97nj" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.027973 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-d6rbm"] Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.029180 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-d6rbm" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.038556 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-d6rbm"] Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.065508 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-4214-account-create-update-wph5g" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.077461 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7hqt\" (UniqueName: \"kubernetes.io/projected/fab6d82a-4302-4582-ab50-411aae70084a-kube-api-access-j7hqt\") pod \"keystone-db-create-j97nj\" (UID: \"fab6d82a-4302-4582-ab50-411aae70084a\") " pod="openstack/keystone-db-create-j97nj" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.077665 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fab6d82a-4302-4582-ab50-411aae70084a-operator-scripts\") pod \"keystone-db-create-j97nj\" (UID: \"fab6d82a-4302-4582-ab50-411aae70084a\") " pod="openstack/keystone-db-create-j97nj" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.078333 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fab6d82a-4302-4582-ab50-411aae70084a-operator-scripts\") pod \"keystone-db-create-j97nj\" (UID: \"fab6d82a-4302-4582-ab50-411aae70084a\") " pod="openstack/keystone-db-create-j97nj" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.097697 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7hqt\" (UniqueName: \"kubernetes.io/projected/fab6d82a-4302-4582-ab50-411aae70084a-kube-api-access-j7hqt\") pod \"keystone-db-create-j97nj\" (UID: \"fab6d82a-4302-4582-ab50-411aae70084a\") " pod="openstack/keystone-db-create-j97nj" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.140722 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.151873 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-15b3-account-create-update-cxkhf"] Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.152874 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-15b3-account-create-update-cxkhf" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.156280 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.185154 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-15b3-account-create-update-cxkhf"] Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.186242 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02956ee6-42ea-4056-ba19-1a79683632b9-operator-scripts\") pod \"placement-db-create-d6rbm\" (UID: \"02956ee6-42ea-4056-ba19-1a79683632b9\") " pod="openstack/placement-db-create-d6rbm" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.186323 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-542wc\" (UniqueName: \"kubernetes.io/projected/02956ee6-42ea-4056-ba19-1a79683632b9-kube-api-access-542wc\") pod \"placement-db-create-d6rbm\" (UID: \"02956ee6-42ea-4056-ba19-1a79683632b9\") " pod="openstack/placement-db-create-d6rbm" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.276756 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-j97nj" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.288069 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02956ee6-42ea-4056-ba19-1a79683632b9-operator-scripts\") pod \"placement-db-create-d6rbm\" (UID: \"02956ee6-42ea-4056-ba19-1a79683632b9\") " pod="openstack/placement-db-create-d6rbm" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.288137 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpmxk\" (UniqueName: \"kubernetes.io/projected/d07b4988-dc6b-4414-abf4-24abf610ecbf-kube-api-access-tpmxk\") pod \"placement-15b3-account-create-update-cxkhf\" (UID: \"d07b4988-dc6b-4414-abf4-24abf610ecbf\") " pod="openstack/placement-15b3-account-create-update-cxkhf" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.288214 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-542wc\" (UniqueName: \"kubernetes.io/projected/02956ee6-42ea-4056-ba19-1a79683632b9-kube-api-access-542wc\") pod \"placement-db-create-d6rbm\" (UID: \"02956ee6-42ea-4056-ba19-1a79683632b9\") " pod="openstack/placement-db-create-d6rbm" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.288264 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d07b4988-dc6b-4414-abf4-24abf610ecbf-operator-scripts\") pod \"placement-15b3-account-create-update-cxkhf\" (UID: \"d07b4988-dc6b-4414-abf4-24abf610ecbf\") " pod="openstack/placement-15b3-account-create-update-cxkhf" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.289028 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02956ee6-42ea-4056-ba19-1a79683632b9-operator-scripts\") pod \"placement-db-create-d6rbm\" (UID: \"02956ee6-42ea-4056-ba19-1a79683632b9\") " pod="openstack/placement-db-create-d6rbm" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.338098 4631 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-542wc\" (UniqueName: \"kubernetes.io/projected/02956ee6-42ea-4056-ba19-1a79683632b9-kube-api-access-542wc\") pod \"placement-db-create-d6rbm\" (UID: \"02956ee6-42ea-4056-ba19-1a79683632b9\") " pod="openstack/placement-db-create-d6rbm" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.344059 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-d6rbm" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.394665 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d07b4988-dc6b-4414-abf4-24abf610ecbf-operator-scripts\") pod \"placement-15b3-account-create-update-cxkhf\" (UID: \"d07b4988-dc6b-4414-abf4-24abf610ecbf\") " pod="openstack/placement-15b3-account-create-update-cxkhf" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.394833 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpmxk\" (UniqueName: \"kubernetes.io/projected/d07b4988-dc6b-4414-abf4-24abf610ecbf-kube-api-access-tpmxk\") pod \"placement-15b3-account-create-update-cxkhf\" (UID: \"d07b4988-dc6b-4414-abf4-24abf610ecbf\") " pod="openstack/placement-15b3-account-create-update-cxkhf" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.396229 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d07b4988-dc6b-4414-abf4-24abf610ecbf-operator-scripts\") pod \"placement-15b3-account-create-update-cxkhf\" (UID: \"d07b4988-dc6b-4414-abf4-24abf610ecbf\") " pod="openstack/placement-15b3-account-create-update-cxkhf" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.410541 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-lblx5"] Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.411663 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-lblx5" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.435688 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-lblx5"] Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.454212 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-9167-account-create-update-gt79q"] Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.455737 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-9167-account-create-update-gt79q" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.459825 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpmxk\" (UniqueName: \"kubernetes.io/projected/d07b4988-dc6b-4414-abf4-24abf610ecbf-kube-api-access-tpmxk\") pod \"placement-15b3-account-create-update-cxkhf\" (UID: \"d07b4988-dc6b-4414-abf4-24abf610ecbf\") " pod="openstack/placement-15b3-account-create-update-cxkhf" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.461048 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.498045 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-9167-account-create-update-gt79q"] Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.545906 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-15b3-account-create-update-cxkhf" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.597717 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45fe96ac-432d-4912-a365-ad375be740f0-operator-scripts\") pod \"glance-9167-account-create-update-gt79q\" (UID: \"45fe96ac-432d-4912-a365-ad375be740f0\") " pod="openstack/glance-9167-account-create-update-gt79q" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.597794 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b60da41-b099-42cf-a044-d268327eb8e7-operator-scripts\") pod \"glance-db-create-lblx5\" (UID: \"6b60da41-b099-42cf-a044-d268327eb8e7\") " pod="openstack/glance-db-create-lblx5" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.597820 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l49nk\" (UniqueName: \"kubernetes.io/projected/6b60da41-b099-42cf-a044-d268327eb8e7-kube-api-access-l49nk\") pod \"glance-db-create-lblx5\" (UID: \"6b60da41-b099-42cf-a044-d268327eb8e7\") " pod="openstack/glance-db-create-lblx5" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.597856 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxw8n\" (UniqueName: \"kubernetes.io/projected/45fe96ac-432d-4912-a365-ad375be740f0-kube-api-access-jxw8n\") pod \"glance-9167-account-create-update-gt79q\" (UID: \"45fe96ac-432d-4912-a365-ad375be740f0\") " pod="openstack/glance-9167-account-create-update-gt79q" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.699289 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b60da41-b099-42cf-a044-d268327eb8e7-operator-scripts\") pod \"glance-db-create-lblx5\" (UID: \"6b60da41-b099-42cf-a044-d268327eb8e7\") " pod="openstack/glance-db-create-lblx5" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.699347 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l49nk\" (UniqueName: \"kubernetes.io/projected/6b60da41-b099-42cf-a044-d268327eb8e7-kube-api-access-l49nk\") pod \"glance-db-create-lblx5\" (UID: \"6b60da41-b099-42cf-a044-d268327eb8e7\") " pod="openstack/glance-db-create-lblx5" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.699407 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxw8n\" (UniqueName: \"kubernetes.io/projected/45fe96ac-432d-4912-a365-ad375be740f0-kube-api-access-jxw8n\") pod \"glance-9167-account-create-update-gt79q\" (UID: \"45fe96ac-432d-4912-a365-ad375be740f0\") " pod="openstack/glance-9167-account-create-update-gt79q" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.699491 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45fe96ac-432d-4912-a365-ad375be740f0-operator-scripts\") pod \"glance-9167-account-create-update-gt79q\" (UID: \"45fe96ac-432d-4912-a365-ad375be740f0\") " pod="openstack/glance-9167-account-create-update-gt79q" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.700213 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/45fe96ac-432d-4912-a365-ad375be740f0-operator-scripts\") pod \"glance-9167-account-create-update-gt79q\" (UID: \"45fe96ac-432d-4912-a365-ad375be740f0\") " pod="openstack/glance-9167-account-create-update-gt79q" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.700444 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b60da41-b099-42cf-a044-d268327eb8e7-operator-scripts\") pod \"glance-db-create-lblx5\" (UID: \"6b60da41-b099-42cf-a044-d268327eb8e7\") " pod="openstack/glance-db-create-lblx5" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.710818 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.722803 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l49nk\" (UniqueName: \"kubernetes.io/projected/6b60da41-b099-42cf-a044-d268327eb8e7-kube-api-access-l49nk\") pod \"glance-db-create-lblx5\" (UID: \"6b60da41-b099-42cf-a044-d268327eb8e7\") " pod="openstack/glance-db-create-lblx5" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.727027 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxw8n\" (UniqueName: \"kubernetes.io/projected/45fe96ac-432d-4912-a365-ad375be740f0-kube-api-access-jxw8n\") pod \"glance-9167-account-create-update-gt79q\" (UID: \"45fe96ac-432d-4912-a365-ad375be740f0\") " pod="openstack/glance-9167-account-create-update-gt79q" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.736909 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-lblx5" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.778661 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-9167-account-create-update-gt79q" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.856356 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a71b38c4-ee61-49f2-8c8c-5adc05df2159","Type":"ContainerStarted","Data":"ba1fcee1b1cf54b06e0eded8657cebf7b66dc017f8434f1966c353c37f104084"} Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.857750 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.859168 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8acd1342-fa9f-43be-9c9f-28739a5aed78","Type":"ContainerStarted","Data":"487a6e893e9c9a9b100334baff5d647fb67f51463c392768b47d15a05cd71909"} Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.863120 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef","Type":"ContainerStarted","Data":"15727b25e94c33a02c5db07e68c341ee307de19364380c2d194b03965262fa5b"} Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.863304 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.896424 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-4214-account-create-update-wph5g"] Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.920318 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.183791345 podStartE2EDuration="55.920301728s" podCreationTimestamp="2025-12-04 17:48:08 +0000 UTC" firstStartedPulling="2025-12-04 17:48:10.328680405 +0000 UTC m=+1220.360922403" lastFinishedPulling="2025-12-04 17:48:29.065190788 +0000 UTC m=+1239.097432786" observedRunningTime="2025-12-04 17:49:03.88555295 +0000 UTC m=+1273.917794948" watchObservedRunningTime="2025-12-04 17:49:03.920301728 +0000 UTC m=+1273.952543726" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.926412 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.132458551 podStartE2EDuration="55.926396753s" podCreationTimestamp="2025-12-04 17:48:08 +0000 UTC" firstStartedPulling="2025-12-04 17:48:10.392690483 +0000 UTC m=+1220.424932481" lastFinishedPulling="2025-12-04 17:48:29.186628685 +0000 UTC m=+1239.218870683" observedRunningTime="2025-12-04 17:49:03.917362143 +0000 UTC m=+1273.949604141" watchObservedRunningTime="2025-12-04 17:49:03.926396753 +0000 UTC m=+1273.958638751" Dec 04 17:49:03 crc kubenswrapper[4631]: I1204 17:49:03.992104 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-j97nj"] Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.159150 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-d6rbm"] Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.283582 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-15b3-account-create-update-cxkhf"] Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.429358 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-lblx5"] Dec 04 17:49:04 crc kubenswrapper[4631]: W1204 17:49:04.453761 4631 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b60da41_b099_42cf_a044_d268327eb8e7.slice/crio-6e5175db31eb9a776cad7e1a151059d9cf35cdcd2a33d48aecbda30894fee893 WatchSource:0}: Error finding container 6e5175db31eb9a776cad7e1a151059d9cf35cdcd2a33d48aecbda30894fee893: Status 404 returned error can't find the container with id 6e5175db31eb9a776cad7e1a151059d9cf35cdcd2a33d48aecbda30894fee893 Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.500988 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-9167-account-create-update-gt79q"] Dec 04 17:49:04 crc kubenswrapper[4631]: W1204 17:49:04.516662 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod45fe96ac_432d_4912_a365_ad375be740f0.slice/crio-75736b1a858e9bdea08f843d8e5012d08e80aa4d000f779d73a0236b622ceeee WatchSource:0}: Error finding container 75736b1a858e9bdea08f843d8e5012d08e80aa4d000f779d73a0236b622ceeee: Status 404 returned error can't find the container with id 75736b1a858e9bdea08f843d8e5012d08e80aa4d000f779d73a0236b622ceeee Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.582065 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.732497 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-swiftconf\") pod \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.732797 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-etc-swift\") pod \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.732855 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-dispersionconf\") pod \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.732886 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-scripts\") pod \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.732917 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mhkz7\" (UniqueName: \"kubernetes.io/projected/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-kube-api-access-mhkz7\") pod \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.732971 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-ring-data-devices\") pod \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.733002 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-combined-ca-bundle\") pod \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\" (UID: \"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f\") " Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.735243 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "23cc29b2-48d6-42f1-a2ff-fbd418d0b47f" (UID: "23cc29b2-48d6-42f1-a2ff-fbd418d0b47f"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.735287 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "23cc29b2-48d6-42f1-a2ff-fbd418d0b47f" (UID: "23cc29b2-48d6-42f1-a2ff-fbd418d0b47f"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.744560 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-kube-api-access-mhkz7" (OuterVolumeSpecName: "kube-api-access-mhkz7") pod "23cc29b2-48d6-42f1-a2ff-fbd418d0b47f" (UID: "23cc29b2-48d6-42f1-a2ff-fbd418d0b47f"). InnerVolumeSpecName "kube-api-access-mhkz7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.773654 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "23cc29b2-48d6-42f1-a2ff-fbd418d0b47f" (UID: "23cc29b2-48d6-42f1-a2ff-fbd418d0b47f"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.776194 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "23cc29b2-48d6-42f1-a2ff-fbd418d0b47f" (UID: "23cc29b2-48d6-42f1-a2ff-fbd418d0b47f"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.788639 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-scripts" (OuterVolumeSpecName: "scripts") pod "23cc29b2-48d6-42f1-a2ff-fbd418d0b47f" (UID: "23cc29b2-48d6-42f1-a2ff-fbd418d0b47f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.794309 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "23cc29b2-48d6-42f1-a2ff-fbd418d0b47f" (UID: "23cc29b2-48d6-42f1-a2ff-fbd418d0b47f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.834833 4631 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.834874 4631 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.834887 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.834900 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mhkz7\" (UniqueName: \"kubernetes.io/projected/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-kube-api-access-mhkz7\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.834915 4631 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.834926 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.834935 4631 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/23cc29b2-48d6-42f1-a2ff-fbd418d0b47f-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.877160 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-lblx5" event={"ID":"6b60da41-b099-42cf-a044-d268327eb8e7","Type":"ContainerStarted","Data":"5080b7d0504ecf6365bf7989bf1ee5734b38c41069c6ac1cf3904ac2236740c3"} Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.877202 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-lblx5" event={"ID":"6b60da41-b099-42cf-a044-d268327eb8e7","Type":"ContainerStarted","Data":"6e5175db31eb9a776cad7e1a151059d9cf35cdcd2a33d48aecbda30894fee893"} Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.885171 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-15b3-account-create-update-cxkhf" event={"ID":"d07b4988-dc6b-4414-abf4-24abf610ecbf","Type":"ContainerStarted","Data":"3c0f7870a5a221c78a9d2e146e9e1e0f59b7d10646ee685454bc38f108b16bfe"} Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.885217 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-15b3-account-create-update-cxkhf" event={"ID":"d07b4988-dc6b-4414-abf4-24abf610ecbf","Type":"ContainerStarted","Data":"d539ec16c6703fb2d7f683c747a67e5f61234b659b98f092cbd921505f77d402"} Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.897696 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9167-account-create-update-gt79q" event={"ID":"45fe96ac-432d-4912-a365-ad375be740f0","Type":"ContainerStarted","Data":"c4a585bb7c54ce6df32e5648229e5a1335a7a6a4655b04b16a83faeec1f5fe4b"} Dec 04 17:49:04 crc 
kubenswrapper[4631]: I1204 17:49:04.897741 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9167-account-create-update-gt79q" event={"ID":"45fe96ac-432d-4912-a365-ad375be740f0","Type":"ContainerStarted","Data":"75736b1a858e9bdea08f843d8e5012d08e80aa4d000f779d73a0236b622ceeee"} Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.899455 4631 generic.go:334] "Generic (PLEG): container finished" podID="02956ee6-42ea-4056-ba19-1a79683632b9" containerID="2320af929f7b2276aba2af835bfede5952c85cfc885580b16654f33f88ea3801" exitCode=0 Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.899513 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-d6rbm" event={"ID":"02956ee6-42ea-4056-ba19-1a79683632b9","Type":"ContainerDied","Data":"2320af929f7b2276aba2af835bfede5952c85cfc885580b16654f33f88ea3801"} Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.899534 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-d6rbm" event={"ID":"02956ee6-42ea-4056-ba19-1a79683632b9","Type":"ContainerStarted","Data":"eb88ab15544b9caa2b6e08d38aac4a4bba126af7a2df034e606eae45ffe86618"} Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.921300 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-lblx5" podStartSLOduration=1.9212795489999999 podStartE2EDuration="1.921279549s" podCreationTimestamp="2025-12-04 17:49:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:49:04.897850636 +0000 UTC m=+1274.930092634" watchObservedRunningTime="2025-12-04 17:49:04.921279549 +0000 UTC m=+1274.953521537" Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.941164 4631 generic.go:334] "Generic (PLEG): container finished" podID="5a142a3c-8ece-4f7e-a43d-778c9ad25a32" containerID="31ed7fdf8d2bf4de358e7fba4701f474e7354295f9984e24e38fefff4697d515" exitCode=0 Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.942785 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-4214-account-create-update-wph5g" event={"ID":"5a142a3c-8ece-4f7e-a43d-778c9ad25a32","Type":"ContainerDied","Data":"31ed7fdf8d2bf4de358e7fba4701f474e7354295f9984e24e38fefff4697d515"} Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.942820 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-4214-account-create-update-wph5g" event={"ID":"5a142a3c-8ece-4f7e-a43d-778c9ad25a32","Type":"ContainerStarted","Data":"1c54a86015d4f65ff849e2f6b0617233b318528ecf5a4f507429c3485d182255"} Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.943778 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-9167-account-create-update-gt79q" podStartSLOduration=1.943748304 podStartE2EDuration="1.943748304s" podCreationTimestamp="2025-12-04 17:49:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:49:04.928049774 +0000 UTC m=+1274.960291772" watchObservedRunningTime="2025-12-04 17:49:04.943748304 +0000 UTC m=+1274.975990292" Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.947434 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-xjxf8" Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.948608 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-xjxf8" event={"ID":"23cc29b2-48d6-42f1-a2ff-fbd418d0b47f","Type":"ContainerDied","Data":"94319f74356e73b79e99ed3530a2a4429a52a01d6ad1f04d7c221274736876d1"} Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.948641 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="94319f74356e73b79e99ed3530a2a4429a52a01d6ad1f04d7c221274736876d1" Dec 04 17:49:04 crc kubenswrapper[4631]: I1204 17:49:04.980233 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-15b3-account-create-update-cxkhf" podStartSLOduration=1.980213171 podStartE2EDuration="1.980213171s" podCreationTimestamp="2025-12-04 17:49:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:49:04.954161153 +0000 UTC m=+1274.986403151" watchObservedRunningTime="2025-12-04 17:49:04.980213171 +0000 UTC m=+1275.012455169" Dec 04 17:49:05 crc kubenswrapper[4631]: I1204 17:49:05.001507 4631 generic.go:334] "Generic (PLEG): container finished" podID="fab6d82a-4302-4582-ab50-411aae70084a" containerID="b79e1d04d4364ea017e8e769b932deb0f5cfff74110bfaf66547a964f9f65a07" exitCode=0 Dec 04 17:49:05 crc kubenswrapper[4631]: I1204 17:49:05.001937 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-j97nj" event={"ID":"fab6d82a-4302-4582-ab50-411aae70084a","Type":"ContainerDied","Data":"b79e1d04d4364ea017e8e769b932deb0f5cfff74110bfaf66547a964f9f65a07"} Dec 04 17:49:05 crc kubenswrapper[4631]: I1204 17:49:05.001960 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-j97nj" event={"ID":"fab6d82a-4302-4582-ab50-411aae70084a","Type":"ContainerStarted","Data":"72e16ab6f061a261705be09aab090789d38d045dc0f26c86c6efb5ca5e216841"} Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.019038 4631 generic.go:334] "Generic (PLEG): container finished" podID="45fe96ac-432d-4912-a365-ad375be740f0" containerID="c4a585bb7c54ce6df32e5648229e5a1335a7a6a4655b04b16a83faeec1f5fe4b" exitCode=0 Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.019134 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9167-account-create-update-gt79q" event={"ID":"45fe96ac-432d-4912-a365-ad375be740f0","Type":"ContainerDied","Data":"c4a585bb7c54ce6df32e5648229e5a1335a7a6a4655b04b16a83faeec1f5fe4b"} Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.027403 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8acd1342-fa9f-43be-9c9f-28739a5aed78","Type":"ContainerStarted","Data":"7311aca218c0906bcee3b802770d1f50abff49464ca648d23d1cc7b55ce2dfcd"} Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.027448 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8acd1342-fa9f-43be-9c9f-28739a5aed78","Type":"ContainerStarted","Data":"a51514734a8da28110bcdf2e5531aedc5b32e5694ce506fb081a91103a9c7ea0"} Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.035122 4631 generic.go:334] "Generic (PLEG): container finished" podID="6b60da41-b099-42cf-a044-d268327eb8e7" containerID="5080b7d0504ecf6365bf7989bf1ee5734b38c41069c6ac1cf3904ac2236740c3" exitCode=0 Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 
17:49:06.035519 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-lblx5" event={"ID":"6b60da41-b099-42cf-a044-d268327eb8e7","Type":"ContainerDied","Data":"5080b7d0504ecf6365bf7989bf1ee5734b38c41069c6ac1cf3904ac2236740c3"} Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.050575 4631 generic.go:334] "Generic (PLEG): container finished" podID="d07b4988-dc6b-4414-abf4-24abf610ecbf" containerID="3c0f7870a5a221c78a9d2e146e9e1e0f59b7d10646ee685454bc38f108b16bfe" exitCode=0 Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.051048 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-15b3-account-create-update-cxkhf" event={"ID":"d07b4988-dc6b-4414-abf4-24abf610ecbf","Type":"ContainerDied","Data":"3c0f7870a5a221c78a9d2e146e9e1e0f59b7d10646ee685454bc38f108b16bfe"} Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.167814 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.597343 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-4214-account-create-update-wph5g" Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.684090 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a142a3c-8ece-4f7e-a43d-778c9ad25a32-operator-scripts\") pod \"5a142a3c-8ece-4f7e-a43d-778c9ad25a32\" (UID: \"5a142a3c-8ece-4f7e-a43d-778c9ad25a32\") " Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.684183 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6cmjx\" (UniqueName: \"kubernetes.io/projected/5a142a3c-8ece-4f7e-a43d-778c9ad25a32-kube-api-access-6cmjx\") pod \"5a142a3c-8ece-4f7e-a43d-778c9ad25a32\" (UID: \"5a142a3c-8ece-4f7e-a43d-778c9ad25a32\") " Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.686824 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a142a3c-8ece-4f7e-a43d-778c9ad25a32-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5a142a3c-8ece-4f7e-a43d-778c9ad25a32" (UID: "5a142a3c-8ece-4f7e-a43d-778c9ad25a32"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.692250 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a142a3c-8ece-4f7e-a43d-778c9ad25a32-kube-api-access-6cmjx" (OuterVolumeSpecName: "kube-api-access-6cmjx") pod "5a142a3c-8ece-4f7e-a43d-778c9ad25a32" (UID: "5a142a3c-8ece-4f7e-a43d-778c9ad25a32"). InnerVolumeSpecName "kube-api-access-6cmjx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.777805 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-d6rbm" Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.785493 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-j97nj" Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.785553 4631 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a142a3c-8ece-4f7e-a43d-778c9ad25a32-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.785574 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6cmjx\" (UniqueName: \"kubernetes.io/projected/5a142a3c-8ece-4f7e-a43d-778c9ad25a32-kube-api-access-6cmjx\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.886932 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-542wc\" (UniqueName: \"kubernetes.io/projected/02956ee6-42ea-4056-ba19-1a79683632b9-kube-api-access-542wc\") pod \"02956ee6-42ea-4056-ba19-1a79683632b9\" (UID: \"02956ee6-42ea-4056-ba19-1a79683632b9\") " Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.887032 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02956ee6-42ea-4056-ba19-1a79683632b9-operator-scripts\") pod \"02956ee6-42ea-4056-ba19-1a79683632b9\" (UID: \"02956ee6-42ea-4056-ba19-1a79683632b9\") " Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.887078 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fab6d82a-4302-4582-ab50-411aae70084a-operator-scripts\") pod \"fab6d82a-4302-4582-ab50-411aae70084a\" (UID: \"fab6d82a-4302-4582-ab50-411aae70084a\") " Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.887185 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7hqt\" (UniqueName: \"kubernetes.io/projected/fab6d82a-4302-4582-ab50-411aae70084a-kube-api-access-j7hqt\") pod \"fab6d82a-4302-4582-ab50-411aae70084a\" (UID: \"fab6d82a-4302-4582-ab50-411aae70084a\") " Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.887905 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/02956ee6-42ea-4056-ba19-1a79683632b9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "02956ee6-42ea-4056-ba19-1a79683632b9" (UID: "02956ee6-42ea-4056-ba19-1a79683632b9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.888010 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fab6d82a-4302-4582-ab50-411aae70084a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fab6d82a-4302-4582-ab50-411aae70084a" (UID: "fab6d82a-4302-4582-ab50-411aae70084a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.890459 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fab6d82a-4302-4582-ab50-411aae70084a-kube-api-access-j7hqt" (OuterVolumeSpecName: "kube-api-access-j7hqt") pod "fab6d82a-4302-4582-ab50-411aae70084a" (UID: "fab6d82a-4302-4582-ab50-411aae70084a"). InnerVolumeSpecName "kube-api-access-j7hqt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.891046 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02956ee6-42ea-4056-ba19-1a79683632b9-kube-api-access-542wc" (OuterVolumeSpecName: "kube-api-access-542wc") pod "02956ee6-42ea-4056-ba19-1a79683632b9" (UID: "02956ee6-42ea-4056-ba19-1a79683632b9"). InnerVolumeSpecName "kube-api-access-542wc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.988852 4631 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02956ee6-42ea-4056-ba19-1a79683632b9-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.988886 4631 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fab6d82a-4302-4582-ab50-411aae70084a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.988895 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7hqt\" (UniqueName: \"kubernetes.io/projected/fab6d82a-4302-4582-ab50-411aae70084a-kube-api-access-j7hqt\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:06 crc kubenswrapper[4631]: I1204 17:49:06.988905 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-542wc\" (UniqueName: \"kubernetes.io/projected/02956ee6-42ea-4056-ba19-1a79683632b9-kube-api-access-542wc\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:07 crc kubenswrapper[4631]: I1204 17:49:07.061087 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-d6rbm" event={"ID":"02956ee6-42ea-4056-ba19-1a79683632b9","Type":"ContainerDied","Data":"eb88ab15544b9caa2b6e08d38aac4a4bba126af7a2df034e606eae45ffe86618"} Dec 04 17:49:07 crc kubenswrapper[4631]: I1204 17:49:07.061141 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb88ab15544b9caa2b6e08d38aac4a4bba126af7a2df034e606eae45ffe86618" Dec 04 17:49:07 crc kubenswrapper[4631]: I1204 17:49:07.061137 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-d6rbm" Dec 04 17:49:07 crc kubenswrapper[4631]: I1204 17:49:07.065423 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8acd1342-fa9f-43be-9c9f-28739a5aed78","Type":"ContainerStarted","Data":"bb5265eafcd4bfd5b802b43699a1a31b735c2e317fac0aefe40dcf199aab7875"} Dec 04 17:49:07 crc kubenswrapper[4631]: I1204 17:49:07.065477 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8acd1342-fa9f-43be-9c9f-28739a5aed78","Type":"ContainerStarted","Data":"25b8f69477f0d3546b7204f5dcd66d895b4d1ee50ebe3428e58a7b10b254c4d2"} Dec 04 17:49:07 crc kubenswrapper[4631]: I1204 17:49:07.067155 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-4214-account-create-update-wph5g" event={"ID":"5a142a3c-8ece-4f7e-a43d-778c9ad25a32","Type":"ContainerDied","Data":"1c54a86015d4f65ff849e2f6b0617233b318528ecf5a4f507429c3485d182255"} Dec 04 17:49:07 crc kubenswrapper[4631]: I1204 17:49:07.067193 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c54a86015d4f65ff849e2f6b0617233b318528ecf5a4f507429c3485d182255" Dec 04 17:49:07 crc kubenswrapper[4631]: I1204 17:49:07.067241 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-4214-account-create-update-wph5g" Dec 04 17:49:07 crc kubenswrapper[4631]: I1204 17:49:07.072817 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-j97nj" Dec 04 17:49:07 crc kubenswrapper[4631]: I1204 17:49:07.074349 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-j97nj" event={"ID":"fab6d82a-4302-4582-ab50-411aae70084a","Type":"ContainerDied","Data":"72e16ab6f061a261705be09aab090789d38d045dc0f26c86c6efb5ca5e216841"} Dec 04 17:49:07 crc kubenswrapper[4631]: I1204 17:49:07.074399 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72e16ab6f061a261705be09aab090789d38d045dc0f26c86c6efb5ca5e216841" Dec 04 17:49:07 crc kubenswrapper[4631]: I1204 17:49:07.514893 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-15b3-account-create-update-cxkhf" Dec 04 17:49:07 crc kubenswrapper[4631]: I1204 17:49:07.648106 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tpmxk\" (UniqueName: \"kubernetes.io/projected/d07b4988-dc6b-4414-abf4-24abf610ecbf-kube-api-access-tpmxk\") pod \"d07b4988-dc6b-4414-abf4-24abf610ecbf\" (UID: \"d07b4988-dc6b-4414-abf4-24abf610ecbf\") " Dec 04 17:49:07 crc kubenswrapper[4631]: I1204 17:49:07.648257 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d07b4988-dc6b-4414-abf4-24abf610ecbf-operator-scripts\") pod \"d07b4988-dc6b-4414-abf4-24abf610ecbf\" (UID: \"d07b4988-dc6b-4414-abf4-24abf610ecbf\") " Dec 04 17:49:07 crc kubenswrapper[4631]: I1204 17:49:07.649030 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d07b4988-dc6b-4414-abf4-24abf610ecbf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d07b4988-dc6b-4414-abf4-24abf610ecbf" (UID: "d07b4988-dc6b-4414-abf4-24abf610ecbf"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:07 crc kubenswrapper[4631]: I1204 17:49:07.652107 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d07b4988-dc6b-4414-abf4-24abf610ecbf-kube-api-access-tpmxk" (OuterVolumeSpecName: "kube-api-access-tpmxk") pod "d07b4988-dc6b-4414-abf4-24abf610ecbf" (UID: "d07b4988-dc6b-4414-abf4-24abf610ecbf"). InnerVolumeSpecName "kube-api-access-tpmxk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:49:07 crc kubenswrapper[4631]: I1204 17:49:07.750603 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tpmxk\" (UniqueName: \"kubernetes.io/projected/d07b4988-dc6b-4414-abf4-24abf610ecbf-kube-api-access-tpmxk\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:07 crc kubenswrapper[4631]: I1204 17:49:07.750631 4631 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d07b4988-dc6b-4414-abf4-24abf610ecbf-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:07 crc kubenswrapper[4631]: I1204 17:49:07.923976 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-lblx5" Dec 04 17:49:07 crc kubenswrapper[4631]: I1204 17:49:07.931018 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-9167-account-create-update-gt79q" Dec 04 17:49:08 crc kubenswrapper[4631]: I1204 17:49:08.054094 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b60da41-b099-42cf-a044-d268327eb8e7-operator-scripts\") pod \"6b60da41-b099-42cf-a044-d268327eb8e7\" (UID: \"6b60da41-b099-42cf-a044-d268327eb8e7\") " Dec 04 17:49:08 crc kubenswrapper[4631]: I1204 17:49:08.054260 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l49nk\" (UniqueName: \"kubernetes.io/projected/6b60da41-b099-42cf-a044-d268327eb8e7-kube-api-access-l49nk\") pod \"6b60da41-b099-42cf-a044-d268327eb8e7\" (UID: \"6b60da41-b099-42cf-a044-d268327eb8e7\") " Dec 04 17:49:08 crc kubenswrapper[4631]: I1204 17:49:08.054363 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxw8n\" (UniqueName: \"kubernetes.io/projected/45fe96ac-432d-4912-a365-ad375be740f0-kube-api-access-jxw8n\") pod \"45fe96ac-432d-4912-a365-ad375be740f0\" (UID: \"45fe96ac-432d-4912-a365-ad375be740f0\") " Dec 04 17:49:08 crc kubenswrapper[4631]: I1204 17:49:08.054409 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45fe96ac-432d-4912-a365-ad375be740f0-operator-scripts\") pod \"45fe96ac-432d-4912-a365-ad375be740f0\" (UID: \"45fe96ac-432d-4912-a365-ad375be740f0\") " Dec 04 17:49:08 crc kubenswrapper[4631]: I1204 17:49:08.055153 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b60da41-b099-42cf-a044-d268327eb8e7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6b60da41-b099-42cf-a044-d268327eb8e7" (UID: "6b60da41-b099-42cf-a044-d268327eb8e7"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:08 crc kubenswrapper[4631]: I1204 17:49:08.057138 4631 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b60da41-b099-42cf-a044-d268327eb8e7-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:08 crc kubenswrapper[4631]: I1204 17:49:08.058947 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45fe96ac-432d-4912-a365-ad375be740f0-kube-api-access-jxw8n" (OuterVolumeSpecName: "kube-api-access-jxw8n") pod "45fe96ac-432d-4912-a365-ad375be740f0" (UID: "45fe96ac-432d-4912-a365-ad375be740f0"). InnerVolumeSpecName "kube-api-access-jxw8n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:49:08 crc kubenswrapper[4631]: I1204 17:49:08.059043 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b60da41-b099-42cf-a044-d268327eb8e7-kube-api-access-l49nk" (OuterVolumeSpecName: "kube-api-access-l49nk") pod "6b60da41-b099-42cf-a044-d268327eb8e7" (UID: "6b60da41-b099-42cf-a044-d268327eb8e7"). InnerVolumeSpecName "kube-api-access-l49nk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:49:08 crc kubenswrapper[4631]: I1204 17:49:08.067593 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45fe96ac-432d-4912-a365-ad375be740f0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "45fe96ac-432d-4912-a365-ad375be740f0" (UID: "45fe96ac-432d-4912-a365-ad375be740f0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:08 crc kubenswrapper[4631]: I1204 17:49:08.083786 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-15b3-account-create-update-cxkhf" event={"ID":"d07b4988-dc6b-4414-abf4-24abf610ecbf","Type":"ContainerDied","Data":"d539ec16c6703fb2d7f683c747a67e5f61234b659b98f092cbd921505f77d402"} Dec 04 17:49:08 crc kubenswrapper[4631]: I1204 17:49:08.083858 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d539ec16c6703fb2d7f683c747a67e5f61234b659b98f092cbd921505f77d402" Dec 04 17:49:08 crc kubenswrapper[4631]: I1204 17:49:08.083929 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-15b3-account-create-update-cxkhf" Dec 04 17:49:08 crc kubenswrapper[4631]: I1204 17:49:08.085736 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9167-account-create-update-gt79q" event={"ID":"45fe96ac-432d-4912-a365-ad375be740f0","Type":"ContainerDied","Data":"75736b1a858e9bdea08f843d8e5012d08e80aa4d000f779d73a0236b622ceeee"} Dec 04 17:49:08 crc kubenswrapper[4631]: I1204 17:49:08.085776 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="75736b1a858e9bdea08f843d8e5012d08e80aa4d000f779d73a0236b622ceeee" Dec 04 17:49:08 crc kubenswrapper[4631]: I1204 17:49:08.085910 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-9167-account-create-update-gt79q" Dec 04 17:49:08 crc kubenswrapper[4631]: I1204 17:49:08.087412 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-lblx5" event={"ID":"6b60da41-b099-42cf-a044-d268327eb8e7","Type":"ContainerDied","Data":"6e5175db31eb9a776cad7e1a151059d9cf35cdcd2a33d48aecbda30894fee893"} Dec 04 17:49:08 crc kubenswrapper[4631]: I1204 17:49:08.087434 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e5175db31eb9a776cad7e1a151059d9cf35cdcd2a33d48aecbda30894fee893" Dec 04 17:49:08 crc kubenswrapper[4631]: I1204 17:49:08.087560 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-lblx5" Dec 04 17:49:08 crc kubenswrapper[4631]: I1204 17:49:08.161931 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxw8n\" (UniqueName: \"kubernetes.io/projected/45fe96ac-432d-4912-a365-ad375be740f0-kube-api-access-jxw8n\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:08 crc kubenswrapper[4631]: I1204 17:49:08.161974 4631 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45fe96ac-432d-4912-a365-ad375be740f0-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:08 crc kubenswrapper[4631]: I1204 17:49:08.161983 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l49nk\" (UniqueName: \"kubernetes.io/projected/6b60da41-b099-42cf-a044-d268327eb8e7-kube-api-access-l49nk\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:09 crc kubenswrapper[4631]: I1204 17:49:09.101816 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8acd1342-fa9f-43be-9c9f-28739a5aed78","Type":"ContainerStarted","Data":"1cd573d7a9df65de02f6dad684fc991de553a0dc0d3a048640b9e8612b17d588"} Dec 04 17:49:09 crc kubenswrapper[4631]: I1204 17:49:09.102458 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8acd1342-fa9f-43be-9c9f-28739a5aed78","Type":"ContainerStarted","Data":"aa86272b47f90b9ab3458b00de4387ebee10e79e8f28803fd7068963f19c448d"} Dec 04 17:49:09 crc kubenswrapper[4631]: I1204 17:49:09.102472 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8acd1342-fa9f-43be-9c9f-28739a5aed78","Type":"ContainerStarted","Data":"672a9afc3a00e7fd528a44cbd72bdbdec816ab326ce4d402ea603fc6d684c950"} Dec 04 17:49:09 crc kubenswrapper[4631]: I1204 17:49:09.102484 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8acd1342-fa9f-43be-9c9f-28739a5aed78","Type":"ContainerStarted","Data":"8fdaa5e1c806226a09e9a981896d8be966ad63bf930c8f11b51ac97cc0ce9c0f"} Dec 04 17:49:10 crc kubenswrapper[4631]: I1204 17:49:10.134309 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8acd1342-fa9f-43be-9c9f-28739a5aed78","Type":"ContainerStarted","Data":"829d49cb53604e10f1bf5afce3d09fff0758260f1d6b9b544015837549f422cd"} Dec 04 17:49:10 crc kubenswrapper[4631]: I1204 17:49:10.134716 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8acd1342-fa9f-43be-9c9f-28739a5aed78","Type":"ContainerStarted","Data":"77cd199a6c6df4b07dcc232335517440fd3a568654b59842c6536560f87376f3"} Dec 04 17:49:11 crc kubenswrapper[4631]: I1204 17:49:11.149637 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/swift-storage-0" event={"ID":"8acd1342-fa9f-43be-9c9f-28739a5aed78","Type":"ContainerStarted","Data":"2af51c1eb52a4bd47ce6d4ac8882cb54d2a4884a14b6e4e79b8f777920bd38f2"} Dec 04 17:49:11 crc kubenswrapper[4631]: I1204 17:49:11.149699 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8acd1342-fa9f-43be-9c9f-28739a5aed78","Type":"ContainerStarted","Data":"cc1f693614583a587ec1c5a052b6f15e792b0434c3c1400e064ee908b85a4993"} Dec 04 17:49:11 crc kubenswrapper[4631]: I1204 17:49:11.149710 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8acd1342-fa9f-43be-9c9f-28739a5aed78","Type":"ContainerStarted","Data":"3d6e2c4bbc8ddff02d8525f2f02e66bb0423813798112b2831114ba50b9f0727"} Dec 04 17:49:11 crc kubenswrapper[4631]: I1204 17:49:11.149720 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8acd1342-fa9f-43be-9c9f-28739a5aed78","Type":"ContainerStarted","Data":"92da43413de20788e945ef1e4f5b803a2951e3aafb445720053be417785e53f4"} Dec 04 17:49:11 crc kubenswrapper[4631]: I1204 17:49:11.149729 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8acd1342-fa9f-43be-9c9f-28739a5aed78","Type":"ContainerStarted","Data":"e29d3a5f044853809877bfd4669029c3a74b1431528d81139554e215d88aa3d4"} Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.570511 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=21.615894597 podStartE2EDuration="27.570493821s" podCreationTimestamp="2025-12-04 17:48:45 +0000 UTC" firstStartedPulling="2025-12-04 17:49:03.752224872 +0000 UTC m=+1273.784466870" lastFinishedPulling="2025-12-04 17:49:09.706824096 +0000 UTC m=+1279.739066094" observedRunningTime="2025-12-04 17:49:12.201605939 +0000 UTC m=+1282.233847947" watchObservedRunningTime="2025-12-04 17:49:12.570493821 +0000 UTC m=+1282.602735809" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.571534 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-dsc5v"] Dec 04 17:49:12 crc kubenswrapper[4631]: E1204 17:49:12.571814 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b60da41-b099-42cf-a044-d268327eb8e7" containerName="mariadb-database-create" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.571827 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b60da41-b099-42cf-a044-d268327eb8e7" containerName="mariadb-database-create" Dec 04 17:49:12 crc kubenswrapper[4631]: E1204 17:49:12.571839 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a142a3c-8ece-4f7e-a43d-778c9ad25a32" containerName="mariadb-account-create-update" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.571846 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a142a3c-8ece-4f7e-a43d-778c9ad25a32" containerName="mariadb-account-create-update" Dec 04 17:49:12 crc kubenswrapper[4631]: E1204 17:49:12.571858 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d07b4988-dc6b-4414-abf4-24abf610ecbf" containerName="mariadb-account-create-update" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.571864 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="d07b4988-dc6b-4414-abf4-24abf610ecbf" containerName="mariadb-account-create-update" Dec 04 17:49:12 crc kubenswrapper[4631]: E1204 17:49:12.571875 4631 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="23cc29b2-48d6-42f1-a2ff-fbd418d0b47f" containerName="swift-ring-rebalance" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.571881 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="23cc29b2-48d6-42f1-a2ff-fbd418d0b47f" containerName="swift-ring-rebalance" Dec 04 17:49:12 crc kubenswrapper[4631]: E1204 17:49:12.571893 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45fe96ac-432d-4912-a365-ad375be740f0" containerName="mariadb-account-create-update" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.571899 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="45fe96ac-432d-4912-a365-ad375be740f0" containerName="mariadb-account-create-update" Dec 04 17:49:12 crc kubenswrapper[4631]: E1204 17:49:12.571916 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fab6d82a-4302-4582-ab50-411aae70084a" containerName="mariadb-database-create" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.571922 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="fab6d82a-4302-4582-ab50-411aae70084a" containerName="mariadb-database-create" Dec 04 17:49:12 crc kubenswrapper[4631]: E1204 17:49:12.571934 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02956ee6-42ea-4056-ba19-1a79683632b9" containerName="mariadb-database-create" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.571939 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="02956ee6-42ea-4056-ba19-1a79683632b9" containerName="mariadb-database-create" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.572092 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="fab6d82a-4302-4582-ab50-411aae70084a" containerName="mariadb-database-create" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.572119 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="02956ee6-42ea-4056-ba19-1a79683632b9" containerName="mariadb-database-create" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.572129 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="45fe96ac-432d-4912-a365-ad375be740f0" containerName="mariadb-account-create-update" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.572137 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="d07b4988-dc6b-4414-abf4-24abf610ecbf" containerName="mariadb-account-create-update" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.572147 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b60da41-b099-42cf-a044-d268327eb8e7" containerName="mariadb-database-create" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.572156 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a142a3c-8ece-4f7e-a43d-778c9ad25a32" containerName="mariadb-account-create-update" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.572168 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="23cc29b2-48d6-42f1-a2ff-fbd418d0b47f" containerName="swift-ring-rebalance" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.572950 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.577619 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.591898 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-dsc5v"] Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.732453 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-dsc5v\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.732518 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-dsc5v\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.732659 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-dsc5v\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.732699 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rj5jj\" (UniqueName: \"kubernetes.io/projected/d8911a06-8f51-4c42-a76e-0daa74d11bed-kube-api-access-rj5jj\") pod \"dnsmasq-dns-77585f5f8c-dsc5v\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.732785 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-dsc5v\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.732870 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-config\") pod \"dnsmasq-dns-77585f5f8c-dsc5v\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.834260 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-dsc5v\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.834339 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-dsc5v\" (UID: 
\"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.834362 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rj5jj\" (UniqueName: \"kubernetes.io/projected/d8911a06-8f51-4c42-a76e-0daa74d11bed-kube-api-access-rj5jj\") pod \"dnsmasq-dns-77585f5f8c-dsc5v\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.834426 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-dsc5v\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.834497 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-config\") pod \"dnsmasq-dns-77585f5f8c-dsc5v\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.834537 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-dsc5v\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.835294 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-dsc5v\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.835326 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-dsc5v\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.835429 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-config\") pod \"dnsmasq-dns-77585f5f8c-dsc5v\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.835552 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-dsc5v\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.835681 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-dsc5v\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" Dec 04 17:49:12 
Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.857071 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rj5jj\" (UniqueName: \"kubernetes.io/projected/d8911a06-8f51-4c42-a76e-0daa74d11bed-kube-api-access-rj5jj\") pod \"dnsmasq-dns-77585f5f8c-dsc5v\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v"
Dec 04 17:49:12 crc kubenswrapper[4631]: I1204 17:49:12.892460 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v"
Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.247447 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-2vnfm" podUID="10032f10-bb41-4039-a44d-ca336b45d4df" containerName="ovn-controller" probeResult="failure" output=<
Dec 04 17:49:13 crc kubenswrapper[4631]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Dec 04 17:49:13 crc kubenswrapper[4631]: >
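
The Probe failed entry above is an exec-style readiness probe: a command runs inside the ovn-controller container, exits non-zero, and its combined output is captured verbatim as the multi-line output=< ... > block. A minimal sketch of that mechanic; the shell command here is a stand-in for the real probe script, which checks the controller's connection status against 'connected':

    package main

    import (
        "fmt"
        "os/exec"
        "strings"
    )

    // execProbe reduces an exec probe to its essentials: run a command,
    // treat a non-zero exit as failure, keep the output for the log line.
    func execProbe(name string, args ...string) (ok bool, output string) {
        out, err := exec.Command(name, args...).CombinedOutput()
        return err == nil, strings.TrimSpace(string(out))
    }

    func main() {
        ok, out := execProbe("sh", "-c",
            `status="not connected"; [ "$status" = connected ] || { echo "ERROR - ovn-controller connection status is '$status', expecting 'connected' status"; exit 1; }`)
        fmt.Printf("probeResult=failure:%v output=%q\n", !ok, out)
    }

The failure here is transient: by 17:49:18 the same pod's readiness probe flips to ready (see the SyncLoop (probe) entry further down).
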
Need to start a new one" pod="openstack/glance-db-sync-4hqks" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.627735 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.628011 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-ntp7m" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.632503 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-4hqks"] Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.658468 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/db78b05a-3d74-4e26-8e36-1d72c07da424-var-run\") pod \"ovn-controller-2vnfm-config-mbvtg\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " pod="openstack/ovn-controller-2vnfm-config-mbvtg" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.658516 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/db78b05a-3d74-4e26-8e36-1d72c07da424-var-log-ovn\") pod \"ovn-controller-2vnfm-config-mbvtg\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " pod="openstack/ovn-controller-2vnfm-config-mbvtg" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.658557 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cz2wb\" (UniqueName: \"kubernetes.io/projected/db78b05a-3d74-4e26-8e36-1d72c07da424-kube-api-access-cz2wb\") pod \"ovn-controller-2vnfm-config-mbvtg\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " pod="openstack/ovn-controller-2vnfm-config-mbvtg" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.658586 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/db78b05a-3d74-4e26-8e36-1d72c07da424-var-run-ovn\") pod \"ovn-controller-2vnfm-config-mbvtg\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " pod="openstack/ovn-controller-2vnfm-config-mbvtg" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.658605 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db78b05a-3d74-4e26-8e36-1d72c07da424-scripts\") pod \"ovn-controller-2vnfm-config-mbvtg\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " pod="openstack/ovn-controller-2vnfm-config-mbvtg" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.658654 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/db78b05a-3d74-4e26-8e36-1d72c07da424-additional-scripts\") pod \"ovn-controller-2vnfm-config-mbvtg\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " pod="openstack/ovn-controller-2vnfm-config-mbvtg" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.760129 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/db78b05a-3d74-4e26-8e36-1d72c07da424-additional-scripts\") pod \"ovn-controller-2vnfm-config-mbvtg\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " pod="openstack/ovn-controller-2vnfm-config-mbvtg" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.760186 4631 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf9lq\" (UniqueName: \"kubernetes.io/projected/30425aae-4c9c-445c-8d10-d4e5874fda30-kube-api-access-zf9lq\") pod \"glance-db-sync-4hqks\" (UID: \"30425aae-4c9c-445c-8d10-d4e5874fda30\") " pod="openstack/glance-db-sync-4hqks" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.760243 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/db78b05a-3d74-4e26-8e36-1d72c07da424-var-run\") pod \"ovn-controller-2vnfm-config-mbvtg\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " pod="openstack/ovn-controller-2vnfm-config-mbvtg" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.760265 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/db78b05a-3d74-4e26-8e36-1d72c07da424-var-log-ovn\") pod \"ovn-controller-2vnfm-config-mbvtg\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " pod="openstack/ovn-controller-2vnfm-config-mbvtg" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.760280 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/30425aae-4c9c-445c-8d10-d4e5874fda30-db-sync-config-data\") pod \"glance-db-sync-4hqks\" (UID: \"30425aae-4c9c-445c-8d10-d4e5874fda30\") " pod="openstack/glance-db-sync-4hqks" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.760316 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cz2wb\" (UniqueName: \"kubernetes.io/projected/db78b05a-3d74-4e26-8e36-1d72c07da424-kube-api-access-cz2wb\") pod \"ovn-controller-2vnfm-config-mbvtg\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " pod="openstack/ovn-controller-2vnfm-config-mbvtg" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.760345 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/db78b05a-3d74-4e26-8e36-1d72c07da424-var-run-ovn\") pod \"ovn-controller-2vnfm-config-mbvtg\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " pod="openstack/ovn-controller-2vnfm-config-mbvtg" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.760364 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db78b05a-3d74-4e26-8e36-1d72c07da424-scripts\") pod \"ovn-controller-2vnfm-config-mbvtg\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " pod="openstack/ovn-controller-2vnfm-config-mbvtg" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.760418 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30425aae-4c9c-445c-8d10-d4e5874fda30-config-data\") pod \"glance-db-sync-4hqks\" (UID: \"30425aae-4c9c-445c-8d10-d4e5874fda30\") " pod="openstack/glance-db-sync-4hqks" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.760435 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30425aae-4c9c-445c-8d10-d4e5874fda30-combined-ca-bundle\") pod \"glance-db-sync-4hqks\" (UID: \"30425aae-4c9c-445c-8d10-d4e5874fda30\") " pod="openstack/glance-db-sync-4hqks" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.760640 4631 
Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.760640 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/db78b05a-3d74-4e26-8e36-1d72c07da424-var-log-ovn\") pod \"ovn-controller-2vnfm-config-mbvtg\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " pod="openstack/ovn-controller-2vnfm-config-mbvtg"
Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.760710 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/db78b05a-3d74-4e26-8e36-1d72c07da424-var-run\") pod \"ovn-controller-2vnfm-config-mbvtg\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " pod="openstack/ovn-controller-2vnfm-config-mbvtg"
Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.760765 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/db78b05a-3d74-4e26-8e36-1d72c07da424-var-run-ovn\") pod \"ovn-controller-2vnfm-config-mbvtg\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " pod="openstack/ovn-controller-2vnfm-config-mbvtg"
Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.761040 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/db78b05a-3d74-4e26-8e36-1d72c07da424-additional-scripts\") pod \"ovn-controller-2vnfm-config-mbvtg\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " pod="openstack/ovn-controller-2vnfm-config-mbvtg"
Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.762300 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db78b05a-3d74-4e26-8e36-1d72c07da424-scripts\") pod \"ovn-controller-2vnfm-config-mbvtg\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " pod="openstack/ovn-controller-2vnfm-config-mbvtg"
Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.776193 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cz2wb\" (UniqueName: \"kubernetes.io/projected/db78b05a-3d74-4e26-8e36-1d72c07da424-kube-api-access-cz2wb\") pod \"ovn-controller-2vnfm-config-mbvtg\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " pod="openstack/ovn-controller-2vnfm-config-mbvtg"
Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.862173 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/30425aae-4c9c-445c-8d10-d4e5874fda30-db-sync-config-data\") pod \"glance-db-sync-4hqks\" (UID: \"30425aae-4c9c-445c-8d10-d4e5874fda30\") " pod="openstack/glance-db-sync-4hqks"
Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.862282 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30425aae-4c9c-445c-8d10-d4e5874fda30-config-data\") pod \"glance-db-sync-4hqks\" (UID: \"30425aae-4c9c-445c-8d10-d4e5874fda30\") " pod="openstack/glance-db-sync-4hqks"
Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.862301 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30425aae-4c9c-445c-8d10-d4e5874fda30-combined-ca-bundle\") pod \"glance-db-sync-4hqks\" (UID: \"30425aae-4c9c-445c-8d10-d4e5874fda30\") " pod="openstack/glance-db-sync-4hqks"
Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.862328 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf9lq\" (UniqueName: \"kubernetes.io/projected/30425aae-4c9c-445c-8d10-d4e5874fda30-kube-api-access-zf9lq\") pod \"glance-db-sync-4hqks\" (UID: \"30425aae-4c9c-445c-8d10-d4e5874fda30\") " pod="openstack/glance-db-sync-4hqks"
(UniqueName: \"kubernetes.io/projected/30425aae-4c9c-445c-8d10-d4e5874fda30-kube-api-access-zf9lq\") pod \"glance-db-sync-4hqks\" (UID: \"30425aae-4c9c-445c-8d10-d4e5874fda30\") " pod="openstack/glance-db-sync-4hqks" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.865990 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30425aae-4c9c-445c-8d10-d4e5874fda30-config-data\") pod \"glance-db-sync-4hqks\" (UID: \"30425aae-4c9c-445c-8d10-d4e5874fda30\") " pod="openstack/glance-db-sync-4hqks" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.866035 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/30425aae-4c9c-445c-8d10-d4e5874fda30-db-sync-config-data\") pod \"glance-db-sync-4hqks\" (UID: \"30425aae-4c9c-445c-8d10-d4e5874fda30\") " pod="openstack/glance-db-sync-4hqks" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.866883 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30425aae-4c9c-445c-8d10-d4e5874fda30-combined-ca-bundle\") pod \"glance-db-sync-4hqks\" (UID: \"30425aae-4c9c-445c-8d10-d4e5874fda30\") " pod="openstack/glance-db-sync-4hqks" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.872896 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-2vnfm-config-mbvtg" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.878272 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zf9lq\" (UniqueName: \"kubernetes.io/projected/30425aae-4c9c-445c-8d10-d4e5874fda30-kube-api-access-zf9lq\") pod \"glance-db-sync-4hqks\" (UID: \"30425aae-4c9c-445c-8d10-d4e5874fda30\") " pod="openstack/glance-db-sync-4hqks" Dec 04 17:49:13 crc kubenswrapper[4631]: I1204 17:49:13.943073 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-4hqks" Dec 04 17:49:14 crc kubenswrapper[4631]: I1204 17:49:14.179343 4631 generic.go:334] "Generic (PLEG): container finished" podID="d8911a06-8f51-4c42-a76e-0daa74d11bed" containerID="0719d53a5af1d545581af2f5e78711c9687418fa9c1eb4c4f8b81649db285098" exitCode=0 Dec 04 17:49:14 crc kubenswrapper[4631]: I1204 17:49:14.179500 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" event={"ID":"d8911a06-8f51-4c42-a76e-0daa74d11bed","Type":"ContainerDied","Data":"0719d53a5af1d545581af2f5e78711c9687418fa9c1eb4c4f8b81649db285098"} Dec 04 17:49:14 crc kubenswrapper[4631]: I1204 17:49:14.180091 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" event={"ID":"d8911a06-8f51-4c42-a76e-0daa74d11bed","Type":"ContainerStarted","Data":"6632f359994806bef560616dec39cf39a860ae6d0ce8d5b2474167cd73df7be0"} Dec 04 17:49:14 crc kubenswrapper[4631]: I1204 17:49:14.311341 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-2vnfm-config-mbvtg"] Dec 04 17:49:14 crc kubenswrapper[4631]: W1204 17:49:14.316242 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb78b05a_3d74_4e26_8e36_1d72c07da424.slice/crio-7ba52da5bd627c442325f0632fb62dea7b603c32deb2b2e811019537b8382141 WatchSource:0}: Error finding container 7ba52da5bd627c442325f0632fb62dea7b603c32deb2b2e811019537b8382141: Status 404 returned error can't find the container with id 7ba52da5bd627c442325f0632fb62dea7b603c32deb2b2e811019537b8382141 Dec 04 17:49:14 crc kubenswrapper[4631]: I1204 17:49:14.493977 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-4hqks"] Dec 04 17:49:14 crc kubenswrapper[4631]: W1204 17:49:14.503465 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30425aae_4c9c_445c_8d10_d4e5874fda30.slice/crio-05f31d63faf1a4b9c71aa02e8811f056611f3be3e88356f790cab4cffa95dc16 WatchSource:0}: Error finding container 05f31d63faf1a4b9c71aa02e8811f056611f3be3e88356f790cab4cffa95dc16: Status 404 returned error can't find the container with id 05f31d63faf1a4b9c71aa02e8811f056611f3be3e88356f790cab4cffa95dc16 Dec 04 17:49:15 crc kubenswrapper[4631]: I1204 17:49:15.189217 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" event={"ID":"d8911a06-8f51-4c42-a76e-0daa74d11bed","Type":"ContainerStarted","Data":"b094f76ae4b46ac32fd0f41c23f28941e9609f035b9c67b8c61a757d175f5cae"} Dec 04 17:49:15 crc kubenswrapper[4631]: I1204 17:49:15.189633 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" Dec 04 17:49:15 crc kubenswrapper[4631]: I1204 17:49:15.191105 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-4hqks" event={"ID":"30425aae-4c9c-445c-8d10-d4e5874fda30","Type":"ContainerStarted","Data":"05f31d63faf1a4b9c71aa02e8811f056611f3be3e88356f790cab4cffa95dc16"} Dec 04 17:49:15 crc kubenswrapper[4631]: I1204 17:49:15.195308 4631 generic.go:334] "Generic (PLEG): container finished" podID="db78b05a-3d74-4e26-8e36-1d72c07da424" containerID="455e1ee9ee6c43605f18f76355220fdd58d5b14ee824b79f6f5eac1cffd96926" exitCode=0 Dec 04 17:49:15 crc kubenswrapper[4631]: I1204 17:49:15.195407 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovn-controller-2vnfm-config-mbvtg" event={"ID":"db78b05a-3d74-4e26-8e36-1d72c07da424","Type":"ContainerDied","Data":"455e1ee9ee6c43605f18f76355220fdd58d5b14ee824b79f6f5eac1cffd96926"} Dec 04 17:49:15 crc kubenswrapper[4631]: I1204 17:49:15.195443 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2vnfm-config-mbvtg" event={"ID":"db78b05a-3d74-4e26-8e36-1d72c07da424","Type":"ContainerStarted","Data":"7ba52da5bd627c442325f0632fb62dea7b603c32deb2b2e811019537b8382141"} Dec 04 17:49:15 crc kubenswrapper[4631]: I1204 17:49:15.226488 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" podStartSLOduration=3.226471693 podStartE2EDuration="3.226471693s" podCreationTimestamp="2025-12-04 17:49:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:49:15.221950244 +0000 UTC m=+1285.254192242" watchObservedRunningTime="2025-12-04 17:49:15.226471693 +0000 UTC m=+1285.258713691" Dec 04 17:49:16 crc kubenswrapper[4631]: I1204 17:49:16.541347 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-2vnfm-config-mbvtg" Dec 04 17:49:16 crc kubenswrapper[4631]: I1204 17:49:16.713863 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cz2wb\" (UniqueName: \"kubernetes.io/projected/db78b05a-3d74-4e26-8e36-1d72c07da424-kube-api-access-cz2wb\") pod \"db78b05a-3d74-4e26-8e36-1d72c07da424\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " Dec 04 17:49:16 crc kubenswrapper[4631]: I1204 17:49:16.713925 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/db78b05a-3d74-4e26-8e36-1d72c07da424-var-run\") pod \"db78b05a-3d74-4e26-8e36-1d72c07da424\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " Dec 04 17:49:16 crc kubenswrapper[4631]: I1204 17:49:16.713948 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/db78b05a-3d74-4e26-8e36-1d72c07da424-var-log-ovn\") pod \"db78b05a-3d74-4e26-8e36-1d72c07da424\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " Dec 04 17:49:16 crc kubenswrapper[4631]: I1204 17:49:16.714024 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/db78b05a-3d74-4e26-8e36-1d72c07da424-var-run-ovn\") pod \"db78b05a-3d74-4e26-8e36-1d72c07da424\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " Dec 04 17:49:16 crc kubenswrapper[4631]: I1204 17:49:16.714032 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/db78b05a-3d74-4e26-8e36-1d72c07da424-var-run" (OuterVolumeSpecName: "var-run") pod "db78b05a-3d74-4e26-8e36-1d72c07da424" (UID: "db78b05a-3d74-4e26-8e36-1d72c07da424"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:49:16 crc kubenswrapper[4631]: I1204 17:49:16.714059 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/db78b05a-3d74-4e26-8e36-1d72c07da424-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "db78b05a-3d74-4e26-8e36-1d72c07da424" (UID: "db78b05a-3d74-4e26-8e36-1d72c07da424"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:49:16 crc kubenswrapper[4631]: I1204 17:49:16.714118 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/db78b05a-3d74-4e26-8e36-1d72c07da424-additional-scripts\") pod \"db78b05a-3d74-4e26-8e36-1d72c07da424\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " Dec 04 17:49:16 crc kubenswrapper[4631]: I1204 17:49:16.714147 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/db78b05a-3d74-4e26-8e36-1d72c07da424-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "db78b05a-3d74-4e26-8e36-1d72c07da424" (UID: "db78b05a-3d74-4e26-8e36-1d72c07da424"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:49:16 crc kubenswrapper[4631]: I1204 17:49:16.714157 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db78b05a-3d74-4e26-8e36-1d72c07da424-scripts\") pod \"db78b05a-3d74-4e26-8e36-1d72c07da424\" (UID: \"db78b05a-3d74-4e26-8e36-1d72c07da424\") " Dec 04 17:49:16 crc kubenswrapper[4631]: I1204 17:49:16.714526 4631 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/db78b05a-3d74-4e26-8e36-1d72c07da424-var-run\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:16 crc kubenswrapper[4631]: I1204 17:49:16.714541 4631 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/db78b05a-3d74-4e26-8e36-1d72c07da424-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:16 crc kubenswrapper[4631]: I1204 17:49:16.714550 4631 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/db78b05a-3d74-4e26-8e36-1d72c07da424-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:16 crc kubenswrapper[4631]: I1204 17:49:16.715028 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db78b05a-3d74-4e26-8e36-1d72c07da424-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "db78b05a-3d74-4e26-8e36-1d72c07da424" (UID: "db78b05a-3d74-4e26-8e36-1d72c07da424"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:16 crc kubenswrapper[4631]: I1204 17:49:16.715225 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db78b05a-3d74-4e26-8e36-1d72c07da424-scripts" (OuterVolumeSpecName: "scripts") pod "db78b05a-3d74-4e26-8e36-1d72c07da424" (UID: "db78b05a-3d74-4e26-8e36-1d72c07da424"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:16 crc kubenswrapper[4631]: I1204 17:49:16.725311 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db78b05a-3d74-4e26-8e36-1d72c07da424-kube-api-access-cz2wb" (OuterVolumeSpecName: "kube-api-access-cz2wb") pod "db78b05a-3d74-4e26-8e36-1d72c07da424" (UID: "db78b05a-3d74-4e26-8e36-1d72c07da424"). InnerVolumeSpecName "kube-api-access-cz2wb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:49:16 crc kubenswrapper[4631]: I1204 17:49:16.816192 4631 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/db78b05a-3d74-4e26-8e36-1d72c07da424-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:16 crc kubenswrapper[4631]: I1204 17:49:16.816541 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db78b05a-3d74-4e26-8e36-1d72c07da424-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:16 crc kubenswrapper[4631]: I1204 17:49:16.816552 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cz2wb\" (UniqueName: \"kubernetes.io/projected/db78b05a-3d74-4e26-8e36-1d72c07da424-kube-api-access-cz2wb\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:17 crc kubenswrapper[4631]: I1204 17:49:17.218602 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-2vnfm-config-mbvtg" Dec 04 17:49:17 crc kubenswrapper[4631]: I1204 17:49:17.218617 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2vnfm-config-mbvtg" event={"ID":"db78b05a-3d74-4e26-8e36-1d72c07da424","Type":"ContainerDied","Data":"7ba52da5bd627c442325f0632fb62dea7b603c32deb2b2e811019537b8382141"} Dec 04 17:49:17 crc kubenswrapper[4631]: I1204 17:49:17.218674 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7ba52da5bd627c442325f0632fb62dea7b603c32deb2b2e811019537b8382141" Dec 04 17:49:17 crc kubenswrapper[4631]: I1204 17:49:17.633528 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-2vnfm-config-mbvtg"] Dec 04 17:49:17 crc kubenswrapper[4631]: I1204 17:49:17.644207 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-2vnfm-config-mbvtg"] Dec 04 17:49:18 crc kubenswrapper[4631]: I1204 17:49:18.252454 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db78b05a-3d74-4e26-8e36-1d72c07da424" path="/var/lib/kubelet/pods/db78b05a-3d74-4e26-8e36-1d72c07da424/volumes" Dec 04 17:49:18 crc kubenswrapper[4631]: I1204 17:49:18.253036 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-2vnfm" Dec 04 17:49:19 crc kubenswrapper[4631]: I1204 17:49:19.625666 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:49:19 crc kubenswrapper[4631]: I1204 17:49:19.953616 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.110107 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-wgt4k"] Dec 04 17:49:22 crc kubenswrapper[4631]: E1204 17:49:22.110442 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db78b05a-3d74-4e26-8e36-1d72c07da424" containerName="ovn-config" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.110454 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="db78b05a-3d74-4e26-8e36-1d72c07da424" containerName="ovn-config" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.110601 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="db78b05a-3d74-4e26-8e36-1d72c07da424" containerName="ovn-config" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.111067 4631 util.go:30] "No sandbox for pod can 
Dec 04 17:49:18 crc kubenswrapper[4631]: I1204 17:49:18.253036 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-2vnfm"
Dec 04 17:49:19 crc kubenswrapper[4631]: I1204 17:49:19.625666 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Dec 04 17:49:19 crc kubenswrapper[4631]: I1204 17:49:19.953616 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.110107 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-wgt4k"]
Dec 04 17:49:22 crc kubenswrapper[4631]: E1204 17:49:22.110442 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db78b05a-3d74-4e26-8e36-1d72c07da424" containerName="ovn-config"
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.110454 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="db78b05a-3d74-4e26-8e36-1d72c07da424" containerName="ovn-config"
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.110601 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="db78b05a-3d74-4e26-8e36-1d72c07da424" containerName="ovn-config"
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.111067 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-wgt4k"
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.128602 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-wgt4k"]
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.256300 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cn62p\" (UniqueName: \"kubernetes.io/projected/d06df4fc-ddce-4c76-b612-6e94abc94c9d-kube-api-access-cn62p\") pod \"barbican-db-create-wgt4k\" (UID: \"d06df4fc-ddce-4c76-b612-6e94abc94c9d\") " pod="openstack/barbican-db-create-wgt4k"
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.256350 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d06df4fc-ddce-4c76-b612-6e94abc94c9d-operator-scripts\") pod \"barbican-db-create-wgt4k\" (UID: \"d06df4fc-ddce-4c76-b612-6e94abc94c9d\") " pod="openstack/barbican-db-create-wgt4k"
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.341854 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-6029-account-create-update-hx8lr"]
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.347954 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-6029-account-create-update-hx8lr"
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.355302 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret"
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.361629 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cn62p\" (UniqueName: \"kubernetes.io/projected/d06df4fc-ddce-4c76-b612-6e94abc94c9d-kube-api-access-cn62p\") pod \"barbican-db-create-wgt4k\" (UID: \"d06df4fc-ddce-4c76-b612-6e94abc94c9d\") " pod="openstack/barbican-db-create-wgt4k"
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.361965 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d06df4fc-ddce-4c76-b612-6e94abc94c9d-operator-scripts\") pod \"barbican-db-create-wgt4k\" (UID: \"d06df4fc-ddce-4c76-b612-6e94abc94c9d\") " pod="openstack/barbican-db-create-wgt4k"
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.369099 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d06df4fc-ddce-4c76-b612-6e94abc94c9d-operator-scripts\") pod \"barbican-db-create-wgt4k\" (UID: \"d06df4fc-ddce-4c76-b612-6e94abc94c9d\") " pod="openstack/barbican-db-create-wgt4k"
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.370887 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-6029-account-create-update-hx8lr"]
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.402621 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cn62p\" (UniqueName: \"kubernetes.io/projected/d06df4fc-ddce-4c76-b612-6e94abc94c9d-kube-api-access-cn62p\") pod \"barbican-db-create-wgt4k\" (UID: \"d06df4fc-ddce-4c76-b612-6e94abc94c9d\") " pod="openstack/barbican-db-create-wgt4k"
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.433700 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-wgt4k"
Need to start a new one" pod="openstack/barbican-db-create-wgt4k" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.464970 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtdbs\" (UniqueName: \"kubernetes.io/projected/4f09451c-0497-468a-a555-df590ba4e739-kube-api-access-mtdbs\") pod \"cinder-6029-account-create-update-hx8lr\" (UID: \"4f09451c-0497-468a-a555-df590ba4e739\") " pod="openstack/cinder-6029-account-create-update-hx8lr" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.465059 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f09451c-0497-468a-a555-df590ba4e739-operator-scripts\") pod \"cinder-6029-account-create-update-hx8lr\" (UID: \"4f09451c-0497-468a-a555-df590ba4e739\") " pod="openstack/cinder-6029-account-create-update-hx8lr" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.482041 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-927b-account-create-update-7jqb5"] Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.484127 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-927b-account-create-update-7jqb5" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.493015 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.506188 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-gg9j7"] Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.508929 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-gg9j7" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.566931 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtdbs\" (UniqueName: \"kubernetes.io/projected/4f09451c-0497-468a-a555-df590ba4e739-kube-api-access-mtdbs\") pod \"cinder-6029-account-create-update-hx8lr\" (UID: \"4f09451c-0497-468a-a555-df590ba4e739\") " pod="openstack/cinder-6029-account-create-update-hx8lr" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.567026 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f09451c-0497-468a-a555-df590ba4e739-operator-scripts\") pod \"cinder-6029-account-create-update-hx8lr\" (UID: \"4f09451c-0497-468a-a555-df590ba4e739\") " pod="openstack/cinder-6029-account-create-update-hx8lr" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.567855 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f09451c-0497-468a-a555-df590ba4e739-operator-scripts\") pod \"cinder-6029-account-create-update-hx8lr\" (UID: \"4f09451c-0497-468a-a555-df590ba4e739\") " pod="openstack/cinder-6029-account-create-update-hx8lr" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.572795 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-gg9j7"] Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.595406 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-927b-account-create-update-7jqb5"] Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.607673 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtdbs\" 
(UniqueName: \"kubernetes.io/projected/4f09451c-0497-468a-a555-df590ba4e739-kube-api-access-mtdbs\") pod \"cinder-6029-account-create-update-hx8lr\" (UID: \"4f09451c-0497-468a-a555-df590ba4e739\") " pod="openstack/cinder-6029-account-create-update-hx8lr" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.668352 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3cd3284d-eef1-46b8-a5eb-88c0a1772c61-operator-scripts\") pod \"cinder-db-create-gg9j7\" (UID: \"3cd3284d-eef1-46b8-a5eb-88c0a1772c61\") " pod="openstack/cinder-db-create-gg9j7" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.668439 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjhsl\" (UniqueName: \"kubernetes.io/projected/3cd3284d-eef1-46b8-a5eb-88c0a1772c61-kube-api-access-zjhsl\") pod \"cinder-db-create-gg9j7\" (UID: \"3cd3284d-eef1-46b8-a5eb-88c0a1772c61\") " pod="openstack/cinder-db-create-gg9j7" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.668470 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d740232-221b-4667-bb8e-995d626b74ce-operator-scripts\") pod \"barbican-927b-account-create-update-7jqb5\" (UID: \"2d740232-221b-4667-bb8e-995d626b74ce\") " pod="openstack/barbican-927b-account-create-update-7jqb5" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.668510 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l25sw\" (UniqueName: \"kubernetes.io/projected/2d740232-221b-4667-bb8e-995d626b74ce-kube-api-access-l25sw\") pod \"barbican-927b-account-create-update-7jqb5\" (UID: \"2d740232-221b-4667-bb8e-995d626b74ce\") " pod="openstack/barbican-927b-account-create-update-7jqb5" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.702772 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-6029-account-create-update-hx8lr" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.718949 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-qgrwj"] Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.720066 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-qgrwj" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.727457 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-sxcqt" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.727475 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.734857 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.735102 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-x45nc"] Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.736408 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-x45nc" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.736661 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.745443 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-3f0f-account-create-update-8ptr9"] Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.746506 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-3f0f-account-create-update-8ptr9" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.749056 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.758865 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-qgrwj"] Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.768503 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-x45nc"] Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.770326 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3cd3284d-eef1-46b8-a5eb-88c0a1772c61-operator-scripts\") pod \"cinder-db-create-gg9j7\" (UID: \"3cd3284d-eef1-46b8-a5eb-88c0a1772c61\") " pod="openstack/cinder-db-create-gg9j7" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.770383 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjhsl\" (UniqueName: \"kubernetes.io/projected/3cd3284d-eef1-46b8-a5eb-88c0a1772c61-kube-api-access-zjhsl\") pod \"cinder-db-create-gg9j7\" (UID: \"3cd3284d-eef1-46b8-a5eb-88c0a1772c61\") " pod="openstack/cinder-db-create-gg9j7" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.770407 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d740232-221b-4667-bb8e-995d626b74ce-operator-scripts\") pod \"barbican-927b-account-create-update-7jqb5\" (UID: \"2d740232-221b-4667-bb8e-995d626b74ce\") " pod="openstack/barbican-927b-account-create-update-7jqb5" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.770441 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l25sw\" (UniqueName: \"kubernetes.io/projected/2d740232-221b-4667-bb8e-995d626b74ce-kube-api-access-l25sw\") pod \"barbican-927b-account-create-update-7jqb5\" (UID: \"2d740232-221b-4667-bb8e-995d626b74ce\") " pod="openstack/barbican-927b-account-create-update-7jqb5" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.771337 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3cd3284d-eef1-46b8-a5eb-88c0a1772c61-operator-scripts\") pod \"cinder-db-create-gg9j7\" (UID: \"3cd3284d-eef1-46b8-a5eb-88c0a1772c61\") " pod="openstack/cinder-db-create-gg9j7" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.771931 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d740232-221b-4667-bb8e-995d626b74ce-operator-scripts\") pod \"barbican-927b-account-create-update-7jqb5\" (UID: \"2d740232-221b-4667-bb8e-995d626b74ce\") " pod="openstack/barbican-927b-account-create-update-7jqb5" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.777562 4631 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-3f0f-account-create-update-8ptr9"] Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.815617 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjhsl\" (UniqueName: \"kubernetes.io/projected/3cd3284d-eef1-46b8-a5eb-88c0a1772c61-kube-api-access-zjhsl\") pod \"cinder-db-create-gg9j7\" (UID: \"3cd3284d-eef1-46b8-a5eb-88c0a1772c61\") " pod="openstack/cinder-db-create-gg9j7" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.827396 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-gg9j7" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.831930 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l25sw\" (UniqueName: \"kubernetes.io/projected/2d740232-221b-4667-bb8e-995d626b74ce-kube-api-access-l25sw\") pod \"barbican-927b-account-create-update-7jqb5\" (UID: \"2d740232-221b-4667-bb8e-995d626b74ce\") " pod="openstack/barbican-927b-account-create-update-7jqb5" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.873568 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9b827db3-b1ee-4b2d-8e57-807fc9449549-operator-scripts\") pod \"neutron-db-create-x45nc\" (UID: \"9b827db3-b1ee-4b2d-8e57-807fc9449549\") " pod="openstack/neutron-db-create-x45nc" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.873665 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f5756a6-d59c-4653-ac3a-8ba1fd91862b-config-data\") pod \"keystone-db-sync-qgrwj\" (UID: \"8f5756a6-d59c-4653-ac3a-8ba1fd91862b\") " pod="openstack/keystone-db-sync-qgrwj" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.873729 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mqhk\" (UniqueName: \"kubernetes.io/projected/9b827db3-b1ee-4b2d-8e57-807fc9449549-kube-api-access-8mqhk\") pod \"neutron-db-create-x45nc\" (UID: \"9b827db3-b1ee-4b2d-8e57-807fc9449549\") " pod="openstack/neutron-db-create-x45nc" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.873753 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcg5k\" (UniqueName: \"kubernetes.io/projected/8f5756a6-d59c-4653-ac3a-8ba1fd91862b-kube-api-access-tcg5k\") pod \"keystone-db-sync-qgrwj\" (UID: \"8f5756a6-d59c-4653-ac3a-8ba1fd91862b\") " pod="openstack/keystone-db-sync-qgrwj" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.873782 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ea88fc7-43b7-4b93-b9f0-e6833868a1cf-operator-scripts\") pod \"neutron-3f0f-account-create-update-8ptr9\" (UID: \"2ea88fc7-43b7-4b93-b9f0-e6833868a1cf\") " pod="openstack/neutron-3f0f-account-create-update-8ptr9" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.873833 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5756a6-d59c-4653-ac3a-8ba1fd91862b-combined-ca-bundle\") pod \"keystone-db-sync-qgrwj\" (UID: \"8f5756a6-d59c-4653-ac3a-8ba1fd91862b\") " pod="openstack/keystone-db-sync-qgrwj" Dec 04 17:49:22 
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.873871 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwb75\" (UniqueName: \"kubernetes.io/projected/2ea88fc7-43b7-4b93-b9f0-e6833868a1cf-kube-api-access-mwb75\") pod \"neutron-3f0f-account-create-update-8ptr9\" (UID: \"2ea88fc7-43b7-4b93-b9f0-e6833868a1cf\") " pod="openstack/neutron-3f0f-account-create-update-8ptr9"
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.894602 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v"
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.975441 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwb75\" (UniqueName: \"kubernetes.io/projected/2ea88fc7-43b7-4b93-b9f0-e6833868a1cf-kube-api-access-mwb75\") pod \"neutron-3f0f-account-create-update-8ptr9\" (UID: \"2ea88fc7-43b7-4b93-b9f0-e6833868a1cf\") " pod="openstack/neutron-3f0f-account-create-update-8ptr9"
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.975492 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9b827db3-b1ee-4b2d-8e57-807fc9449549-operator-scripts\") pod \"neutron-db-create-x45nc\" (UID: \"9b827db3-b1ee-4b2d-8e57-807fc9449549\") " pod="openstack/neutron-db-create-x45nc"
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.975536 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f5756a6-d59c-4653-ac3a-8ba1fd91862b-config-data\") pod \"keystone-db-sync-qgrwj\" (UID: \"8f5756a6-d59c-4653-ac3a-8ba1fd91862b\") " pod="openstack/keystone-db-sync-qgrwj"
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.975596 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mqhk\" (UniqueName: \"kubernetes.io/projected/9b827db3-b1ee-4b2d-8e57-807fc9449549-kube-api-access-8mqhk\") pod \"neutron-db-create-x45nc\" (UID: \"9b827db3-b1ee-4b2d-8e57-807fc9449549\") " pod="openstack/neutron-db-create-x45nc"
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.975619 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcg5k\" (UniqueName: \"kubernetes.io/projected/8f5756a6-d59c-4653-ac3a-8ba1fd91862b-kube-api-access-tcg5k\") pod \"keystone-db-sync-qgrwj\" (UID: \"8f5756a6-d59c-4653-ac3a-8ba1fd91862b\") " pod="openstack/keystone-db-sync-qgrwj"
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.975646 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ea88fc7-43b7-4b93-b9f0-e6833868a1cf-operator-scripts\") pod \"neutron-3f0f-account-create-update-8ptr9\" (UID: \"2ea88fc7-43b7-4b93-b9f0-e6833868a1cf\") " pod="openstack/neutron-3f0f-account-create-update-8ptr9"
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.975690 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5756a6-d59c-4653-ac3a-8ba1fd91862b-combined-ca-bundle\") pod \"keystone-db-sync-qgrwj\" (UID: \"8f5756a6-d59c-4653-ac3a-8ba1fd91862b\") " pod="openstack/keystone-db-sync-qgrwj"
Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.977574 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ea88fc7-43b7-4b93-b9f0-e6833868a1cf-operator-scripts\") pod \"neutron-3f0f-account-create-update-8ptr9\" (UID: \"2ea88fc7-43b7-4b93-b9f0-e6833868a1cf\") " pod="openstack/neutron-3f0f-account-create-update-8ptr9"
\"kubernetes.io/configmap/2ea88fc7-43b7-4b93-b9f0-e6833868a1cf-operator-scripts\") pod \"neutron-3f0f-account-create-update-8ptr9\" (UID: \"2ea88fc7-43b7-4b93-b9f0-e6833868a1cf\") " pod="openstack/neutron-3f0f-account-create-update-8ptr9" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.977575 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9b827db3-b1ee-4b2d-8e57-807fc9449549-operator-scripts\") pod \"neutron-db-create-x45nc\" (UID: \"9b827db3-b1ee-4b2d-8e57-807fc9449549\") " pod="openstack/neutron-db-create-x45nc" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.978887 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-7lrtg"] Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.979184 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-7lrtg" podUID="5b28b17b-31f2-49c7-b089-dfd0275bc8ba" containerName="dnsmasq-dns" containerID="cri-o://65fd63a43a64bc28cba0b6c37c0567b947410004d363d14ef4a6cfb7256d04f6" gracePeriod=10 Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.992989 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5756a6-d59c-4653-ac3a-8ba1fd91862b-combined-ca-bundle\") pod \"keystone-db-sync-qgrwj\" (UID: \"8f5756a6-d59c-4653-ac3a-8ba1fd91862b\") " pod="openstack/keystone-db-sync-qgrwj" Dec 04 17:49:22 crc kubenswrapper[4631]: I1204 17:49:22.994470 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f5756a6-d59c-4653-ac3a-8ba1fd91862b-config-data\") pod \"keystone-db-sync-qgrwj\" (UID: \"8f5756a6-d59c-4653-ac3a-8ba1fd91862b\") " pod="openstack/keystone-db-sync-qgrwj" Dec 04 17:49:23 crc kubenswrapper[4631]: I1204 17:49:23.024987 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwb75\" (UniqueName: \"kubernetes.io/projected/2ea88fc7-43b7-4b93-b9f0-e6833868a1cf-kube-api-access-mwb75\") pod \"neutron-3f0f-account-create-update-8ptr9\" (UID: \"2ea88fc7-43b7-4b93-b9f0-e6833868a1cf\") " pod="openstack/neutron-3f0f-account-create-update-8ptr9" Dec 04 17:49:23 crc kubenswrapper[4631]: I1204 17:49:23.044398 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mqhk\" (UniqueName: \"kubernetes.io/projected/9b827db3-b1ee-4b2d-8e57-807fc9449549-kube-api-access-8mqhk\") pod \"neutron-db-create-x45nc\" (UID: \"9b827db3-b1ee-4b2d-8e57-807fc9449549\") " pod="openstack/neutron-db-create-x45nc" Dec 04 17:49:23 crc kubenswrapper[4631]: I1204 17:49:23.050642 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-x45nc" Dec 04 17:49:23 crc kubenswrapper[4631]: I1204 17:49:23.054707 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcg5k\" (UniqueName: \"kubernetes.io/projected/8f5756a6-d59c-4653-ac3a-8ba1fd91862b-kube-api-access-tcg5k\") pod \"keystone-db-sync-qgrwj\" (UID: \"8f5756a6-d59c-4653-ac3a-8ba1fd91862b\") " pod="openstack/keystone-db-sync-qgrwj" Dec 04 17:49:23 crc kubenswrapper[4631]: I1204 17:49:23.069159 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-3f0f-account-create-update-8ptr9" Dec 04 17:49:23 crc kubenswrapper[4631]: I1204 17:49:23.117779 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-927b-account-create-update-7jqb5" Dec 04 17:49:23 crc kubenswrapper[4631]: I1204 17:49:23.276347 4631 generic.go:334] "Generic (PLEG): container finished" podID="5b28b17b-31f2-49c7-b089-dfd0275bc8ba" containerID="65fd63a43a64bc28cba0b6c37c0567b947410004d363d14ef4a6cfb7256d04f6" exitCode=0 Dec 04 17:49:23 crc kubenswrapper[4631]: I1204 17:49:23.276674 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-7lrtg" event={"ID":"5b28b17b-31f2-49c7-b089-dfd0275bc8ba","Type":"ContainerDied","Data":"65fd63a43a64bc28cba0b6c37c0567b947410004d363d14ef4a6cfb7256d04f6"} Dec 04 17:49:23 crc kubenswrapper[4631]: I1204 17:49:23.337507 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-qgrwj" Dec 04 17:49:25 crc kubenswrapper[4631]: I1204 17:49:25.912698 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-7lrtg" podUID="5b28b17b-31f2-49c7-b089-dfd0275bc8ba" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Dec 04 17:49:30 crc kubenswrapper[4631]: E1204 17:49:30.848432 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified" Dec 04 17:49:30 crc kubenswrapper[4631]: E1204 17:49:30.849317 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zf9lq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start 
failed in pod glance-db-sync-4hqks_openstack(30425aae-4c9c-445c-8d10-d4e5874fda30): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 17:49:30 crc kubenswrapper[4631]: E1204 17:49:30.850562 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-4hqks" podUID="30425aae-4c9c-445c-8d10-d4e5874fda30" Dec 04 17:49:30 crc kubenswrapper[4631]: I1204 17:49:30.912536 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-7lrtg" podUID="5b28b17b-31f2-49c7-b089-dfd0275bc8ba" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.274838 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-7lrtg" Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.365210 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-7lrtg" Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.365220 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-7lrtg" event={"ID":"5b28b17b-31f2-49c7-b089-dfd0275bc8ba","Type":"ContainerDied","Data":"06d3b98ecbb7e4333d8f78a911719ed995a8013f5eb8c0426a257c0a430f362d"} Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.365286 4631 scope.go:117] "RemoveContainer" containerID="65fd63a43a64bc28cba0b6c37c0567b947410004d363d14ef4a6cfb7256d04f6" Dec 04 17:49:31 crc kubenswrapper[4631]: E1204 17:49:31.366939 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-4hqks" podUID="30425aae-4c9c-445c-8d10-d4e5874fda30" Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.397410 4631 scope.go:117] "RemoveContainer" containerID="fc250b501ffbf552b7a5b933df5170dae19ce0b17a4f0ec1a3a20b8720f6c07e" Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.432120 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xqc5k\" (UniqueName: \"kubernetes.io/projected/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-kube-api-access-xqc5k\") pod \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\" (UID: \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\") " Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.432457 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-ovsdbserver-nb\") pod \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\" (UID: \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\") " Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.432508 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-config\") pod \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\" (UID: \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\") " Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.432553 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-ovsdbserver-sb\") pod \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\" (UID: \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\") " Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.432583 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-dns-svc\") pod \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\" (UID: \"5b28b17b-31f2-49c7-b089-dfd0275bc8ba\") " Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.439230 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-kube-api-access-xqc5k" (OuterVolumeSpecName: "kube-api-access-xqc5k") pod "5b28b17b-31f2-49c7-b089-dfd0275bc8ba" (UID: "5b28b17b-31f2-49c7-b089-dfd0275bc8ba"). InnerVolumeSpecName "kube-api-access-xqc5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.464616 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-6029-account-create-update-hx8lr"] Dec 04 17:49:31 crc kubenswrapper[4631]: W1204 17:49:31.470458 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4f09451c_0497_468a_a555_df590ba4e739.slice/crio-422c00d04f73a6a7b4e10bfeaa2af16fb37ab6a5fde5c42a9fa960e7f7c80bd3 WatchSource:0}: Error finding container 422c00d04f73a6a7b4e10bfeaa2af16fb37ab6a5fde5c42a9fa960e7f7c80bd3: Status 404 returned error can't find the container with id 422c00d04f73a6a7b4e10bfeaa2af16fb37ab6a5fde5c42a9fa960e7f7c80bd3 Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.480998 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5b28b17b-31f2-49c7-b089-dfd0275bc8ba" (UID: "5b28b17b-31f2-49c7-b089-dfd0275bc8ba"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.481904 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-config" (OuterVolumeSpecName: "config") pod "5b28b17b-31f2-49c7-b089-dfd0275bc8ba" (UID: "5b28b17b-31f2-49c7-b089-dfd0275bc8ba"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.484595 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5b28b17b-31f2-49c7-b089-dfd0275bc8ba" (UID: "5b28b17b-31f2-49c7-b089-dfd0275bc8ba"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.489735 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5b28b17b-31f2-49c7-b089-dfd0275bc8ba" (UID: "5b28b17b-31f2-49c7-b089-dfd0275bc8ba"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.535754 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xqc5k\" (UniqueName: \"kubernetes.io/projected/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-kube-api-access-xqc5k\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.535780 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.535793 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.535805 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.535814 4631 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b28b17b-31f2-49c7-b089-dfd0275bc8ba-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.681490 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-gg9j7"] Dec 04 17:49:31 crc kubenswrapper[4631]: W1204 17:49:31.684044 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d740232_221b_4667_bb8e_995d626b74ce.slice/crio-31338ba979f9dfc7fb5635f01539af012c08409d9f314b959a72affe13f8de82 WatchSource:0}: Error finding container 31338ba979f9dfc7fb5635f01539af012c08409d9f314b959a72affe13f8de82: Status 404 returned error can't find the container with id 31338ba979f9dfc7fb5635f01539af012c08409d9f314b959a72affe13f8de82 Dec 04 17:49:31 crc kubenswrapper[4631]: W1204 17:49:31.708846 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b827db3_b1ee_4b2d_8e57_807fc9449549.slice/crio-84a53d2358866762e351db694f6a708319b5fceba08e71f11a9bbe01022337d7 WatchSource:0}: Error finding container 84a53d2358866762e351db694f6a708319b5fceba08e71f11a9bbe01022337d7: Status 404 returned error can't find the container with id 84a53d2358866762e351db694f6a708319b5fceba08e71f11a9bbe01022337d7 Dec 04 17:49:31 crc kubenswrapper[4631]: W1204 17:49:31.710596 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd06df4fc_ddce_4c76_b612_6e94abc94c9d.slice/crio-a8b8671094f9c1d66990dd84edfca9b4fbce60f80391bdb1d193052a0ab18264 WatchSource:0}: Error finding container a8b8671094f9c1d66990dd84edfca9b4fbce60f80391bdb1d193052a0ab18264: Status 404 returned error can't find the container with id a8b8671094f9c1d66990dd84edfca9b4fbce60f80391bdb1d193052a0ab18264 Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.713325 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-3f0f-account-create-update-8ptr9"] Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.720841 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-927b-account-create-update-7jqb5"] Dec 04 17:49:31 crc kubenswrapper[4631]: 
I1204 17:49:31.730887 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-wgt4k"] Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.756131 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-x45nc"] Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.778624 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-qgrwj"] Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.799805 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-7lrtg"] Dec 04 17:49:31 crc kubenswrapper[4631]: I1204 17:49:31.820909 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-7lrtg"] Dec 04 17:49:32 crc kubenswrapper[4631]: I1204 17:49:32.251944 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b28b17b-31f2-49c7-b089-dfd0275bc8ba" path="/var/lib/kubelet/pods/5b28b17b-31f2-49c7-b089-dfd0275bc8ba/volumes" Dec 04 17:49:32 crc kubenswrapper[4631]: I1204 17:49:32.373449 4631 generic.go:334] "Generic (PLEG): container finished" podID="3cd3284d-eef1-46b8-a5eb-88c0a1772c61" containerID="3eada3c14394596d6513d90071095938a7b659eff3460ae64a5eda9a1381b44f" exitCode=0 Dec 04 17:49:32 crc kubenswrapper[4631]: I1204 17:49:32.373545 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-gg9j7" event={"ID":"3cd3284d-eef1-46b8-a5eb-88c0a1772c61","Type":"ContainerDied","Data":"3eada3c14394596d6513d90071095938a7b659eff3460ae64a5eda9a1381b44f"} Dec 04 17:49:32 crc kubenswrapper[4631]: I1204 17:49:32.373574 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-gg9j7" event={"ID":"3cd3284d-eef1-46b8-a5eb-88c0a1772c61","Type":"ContainerStarted","Data":"89b2cc44fa6235d9a860531d4694ee8a74ddeba41171259159db6351d137518a"} Dec 04 17:49:32 crc kubenswrapper[4631]: I1204 17:49:32.379849 4631 generic.go:334] "Generic (PLEG): container finished" podID="d06df4fc-ddce-4c76-b612-6e94abc94c9d" containerID="a028cbcabf035c32e5c2316924df66f4c7a2ccdc9c8060591ec1fa6e2f769834" exitCode=0 Dec 04 17:49:32 crc kubenswrapper[4631]: I1204 17:49:32.379892 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-wgt4k" event={"ID":"d06df4fc-ddce-4c76-b612-6e94abc94c9d","Type":"ContainerDied","Data":"a028cbcabf035c32e5c2316924df66f4c7a2ccdc9c8060591ec1fa6e2f769834"} Dec 04 17:49:32 crc kubenswrapper[4631]: I1204 17:49:32.379926 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-wgt4k" event={"ID":"d06df4fc-ddce-4c76-b612-6e94abc94c9d","Type":"ContainerStarted","Data":"a8b8671094f9c1d66990dd84edfca9b4fbce60f80391bdb1d193052a0ab18264"} Dec 04 17:49:32 crc kubenswrapper[4631]: I1204 17:49:32.381731 4631 generic.go:334] "Generic (PLEG): container finished" podID="2ea88fc7-43b7-4b93-b9f0-e6833868a1cf" containerID="02556b9e0072dcfb8b14a61e603273eef0d200a7a1b32a0fc699dce2da1dea6d" exitCode=0 Dec 04 17:49:32 crc kubenswrapper[4631]: I1204 17:49:32.381796 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-3f0f-account-create-update-8ptr9" event={"ID":"2ea88fc7-43b7-4b93-b9f0-e6833868a1cf","Type":"ContainerDied","Data":"02556b9e0072dcfb8b14a61e603273eef0d200a7a1b32a0fc699dce2da1dea6d"} Dec 04 17:49:32 crc kubenswrapper[4631]: I1204 17:49:32.381821 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-3f0f-account-create-update-8ptr9" 
event={"ID":"2ea88fc7-43b7-4b93-b9f0-e6833868a1cf","Type":"ContainerStarted","Data":"14ac9cd4fb1ebac066a10f30a1ee7b0992384cd687fa020faf277fe8d2c289b4"} Dec 04 17:49:32 crc kubenswrapper[4631]: I1204 17:49:32.383560 4631 generic.go:334] "Generic (PLEG): container finished" podID="4f09451c-0497-468a-a555-df590ba4e739" containerID="d1a4125258045a3ddd27d1b6170ec77289ffad8f8e6f1930c230bcf7e9d7a895" exitCode=0 Dec 04 17:49:32 crc kubenswrapper[4631]: I1204 17:49:32.383598 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-6029-account-create-update-hx8lr" event={"ID":"4f09451c-0497-468a-a555-df590ba4e739","Type":"ContainerDied","Data":"d1a4125258045a3ddd27d1b6170ec77289ffad8f8e6f1930c230bcf7e9d7a895"} Dec 04 17:49:32 crc kubenswrapper[4631]: I1204 17:49:32.383612 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-6029-account-create-update-hx8lr" event={"ID":"4f09451c-0497-468a-a555-df590ba4e739","Type":"ContainerStarted","Data":"422c00d04f73a6a7b4e10bfeaa2af16fb37ab6a5fde5c42a9fa960e7f7c80bd3"} Dec 04 17:49:32 crc kubenswrapper[4631]: I1204 17:49:32.385516 4631 generic.go:334] "Generic (PLEG): container finished" podID="2d740232-221b-4667-bb8e-995d626b74ce" containerID="5c5a2c32dcf6b8c47d005af1666096a25b065ea8558929a026282cc710b10523" exitCode=0 Dec 04 17:49:32 crc kubenswrapper[4631]: I1204 17:49:32.385549 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-927b-account-create-update-7jqb5" event={"ID":"2d740232-221b-4667-bb8e-995d626b74ce","Type":"ContainerDied","Data":"5c5a2c32dcf6b8c47d005af1666096a25b065ea8558929a026282cc710b10523"} Dec 04 17:49:32 crc kubenswrapper[4631]: I1204 17:49:32.386077 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-927b-account-create-update-7jqb5" event={"ID":"2d740232-221b-4667-bb8e-995d626b74ce","Type":"ContainerStarted","Data":"31338ba979f9dfc7fb5635f01539af012c08409d9f314b959a72affe13f8de82"} Dec 04 17:49:32 crc kubenswrapper[4631]: I1204 17:49:32.392882 4631 generic.go:334] "Generic (PLEG): container finished" podID="9b827db3-b1ee-4b2d-8e57-807fc9449549" containerID="eaec7493df6ab934e4f6263a57c742760acde1b22328c17865cd503d36376c6c" exitCode=0 Dec 04 17:49:32 crc kubenswrapper[4631]: I1204 17:49:32.393037 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-x45nc" event={"ID":"9b827db3-b1ee-4b2d-8e57-807fc9449549","Type":"ContainerDied","Data":"eaec7493df6ab934e4f6263a57c742760acde1b22328c17865cd503d36376c6c"} Dec 04 17:49:32 crc kubenswrapper[4631]: I1204 17:49:32.393064 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-x45nc" event={"ID":"9b827db3-b1ee-4b2d-8e57-807fc9449549","Type":"ContainerStarted","Data":"84a53d2358866762e351db694f6a708319b5fceba08e71f11a9bbe01022337d7"} Dec 04 17:49:32 crc kubenswrapper[4631]: I1204 17:49:32.395668 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qgrwj" event={"ID":"8f5756a6-d59c-4653-ac3a-8ba1fd91862b","Type":"ContainerStarted","Data":"7fd080c6b890ea8ca7541ff045f02ea62878b1877826cd167692b0e71d771657"} Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.023879 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 
17:49:36.024245 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.182285 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-wgt4k" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.191310 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-gg9j7" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.204834 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-6029-account-create-update-hx8lr" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.211408 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-x45nc" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.222120 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-3f0f-account-create-update-8ptr9" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.228109 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-927b-account-create-update-7jqb5" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.312383 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9b827db3-b1ee-4b2d-8e57-807fc9449549-operator-scripts\") pod \"9b827db3-b1ee-4b2d-8e57-807fc9449549\" (UID: \"9b827db3-b1ee-4b2d-8e57-807fc9449549\") " Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.312433 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mwb75\" (UniqueName: \"kubernetes.io/projected/2ea88fc7-43b7-4b93-b9f0-e6833868a1cf-kube-api-access-mwb75\") pod \"2ea88fc7-43b7-4b93-b9f0-e6833868a1cf\" (UID: \"2ea88fc7-43b7-4b93-b9f0-e6833868a1cf\") " Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.312480 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3cd3284d-eef1-46b8-a5eb-88c0a1772c61-operator-scripts\") pod \"3cd3284d-eef1-46b8-a5eb-88c0a1772c61\" (UID: \"3cd3284d-eef1-46b8-a5eb-88c0a1772c61\") " Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.312512 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cn62p\" (UniqueName: \"kubernetes.io/projected/d06df4fc-ddce-4c76-b612-6e94abc94c9d-kube-api-access-cn62p\") pod \"d06df4fc-ddce-4c76-b612-6e94abc94c9d\" (UID: \"d06df4fc-ddce-4c76-b612-6e94abc94c9d\") " Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.312545 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f09451c-0497-468a-a555-df590ba4e739-operator-scripts\") pod \"4f09451c-0497-468a-a555-df590ba4e739\" (UID: \"4f09451c-0497-468a-a555-df590ba4e739\") " Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.312564 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/2ea88fc7-43b7-4b93-b9f0-e6833868a1cf-operator-scripts\") pod \"2ea88fc7-43b7-4b93-b9f0-e6833868a1cf\" (UID: \"2ea88fc7-43b7-4b93-b9f0-e6833868a1cf\") " Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.312629 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d06df4fc-ddce-4c76-b612-6e94abc94c9d-operator-scripts\") pod \"d06df4fc-ddce-4c76-b612-6e94abc94c9d\" (UID: \"d06df4fc-ddce-4c76-b612-6e94abc94c9d\") " Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.312660 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mtdbs\" (UniqueName: \"kubernetes.io/projected/4f09451c-0497-468a-a555-df590ba4e739-kube-api-access-mtdbs\") pod \"4f09451c-0497-468a-a555-df590ba4e739\" (UID: \"4f09451c-0497-468a-a555-df590ba4e739\") " Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.312712 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zjhsl\" (UniqueName: \"kubernetes.io/projected/3cd3284d-eef1-46b8-a5eb-88c0a1772c61-kube-api-access-zjhsl\") pod \"3cd3284d-eef1-46b8-a5eb-88c0a1772c61\" (UID: \"3cd3284d-eef1-46b8-a5eb-88c0a1772c61\") " Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.312730 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d740232-221b-4667-bb8e-995d626b74ce-operator-scripts\") pod \"2d740232-221b-4667-bb8e-995d626b74ce\" (UID: \"2d740232-221b-4667-bb8e-995d626b74ce\") " Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.312750 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8mqhk\" (UniqueName: \"kubernetes.io/projected/9b827db3-b1ee-4b2d-8e57-807fc9449549-kube-api-access-8mqhk\") pod \"9b827db3-b1ee-4b2d-8e57-807fc9449549\" (UID: \"9b827db3-b1ee-4b2d-8e57-807fc9449549\") " Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.312784 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l25sw\" (UniqueName: \"kubernetes.io/projected/2d740232-221b-4667-bb8e-995d626b74ce-kube-api-access-l25sw\") pod \"2d740232-221b-4667-bb8e-995d626b74ce\" (UID: \"2d740232-221b-4667-bb8e-995d626b74ce\") " Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.313187 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b827db3-b1ee-4b2d-8e57-807fc9449549-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9b827db3-b1ee-4b2d-8e57-807fc9449549" (UID: "9b827db3-b1ee-4b2d-8e57-807fc9449549"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.313212 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f09451c-0497-468a-a555-df590ba4e739-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4f09451c-0497-468a-a555-df590ba4e739" (UID: "4f09451c-0497-468a-a555-df590ba4e739"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.313236 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cd3284d-eef1-46b8-a5eb-88c0a1772c61-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3cd3284d-eef1-46b8-a5eb-88c0a1772c61" (UID: "3cd3284d-eef1-46b8-a5eb-88c0a1772c61"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.313702 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ea88fc7-43b7-4b93-b9f0-e6833868a1cf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2ea88fc7-43b7-4b93-b9f0-e6833868a1cf" (UID: "2ea88fc7-43b7-4b93-b9f0-e6833868a1cf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.313725 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d06df4fc-ddce-4c76-b612-6e94abc94c9d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d06df4fc-ddce-4c76-b612-6e94abc94c9d" (UID: "d06df4fc-ddce-4c76-b612-6e94abc94c9d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.314138 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d740232-221b-4667-bb8e-995d626b74ce-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2d740232-221b-4667-bb8e-995d626b74ce" (UID: "2d740232-221b-4667-bb8e-995d626b74ce"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.316241 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cd3284d-eef1-46b8-a5eb-88c0a1772c61-kube-api-access-zjhsl" (OuterVolumeSpecName: "kube-api-access-zjhsl") pod "3cd3284d-eef1-46b8-a5eb-88c0a1772c61" (UID: "3cd3284d-eef1-46b8-a5eb-88c0a1772c61"). InnerVolumeSpecName "kube-api-access-zjhsl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.316880 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b827db3-b1ee-4b2d-8e57-807fc9449549-kube-api-access-8mqhk" (OuterVolumeSpecName: "kube-api-access-8mqhk") pod "9b827db3-b1ee-4b2d-8e57-807fc9449549" (UID: "9b827db3-b1ee-4b2d-8e57-807fc9449549"). InnerVolumeSpecName "kube-api-access-8mqhk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.317731 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d740232-221b-4667-bb8e-995d626b74ce-kube-api-access-l25sw" (OuterVolumeSpecName: "kube-api-access-l25sw") pod "2d740232-221b-4667-bb8e-995d626b74ce" (UID: "2d740232-221b-4667-bb8e-995d626b74ce"). InnerVolumeSpecName "kube-api-access-l25sw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.317748 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ea88fc7-43b7-4b93-b9f0-e6833868a1cf-kube-api-access-mwb75" (OuterVolumeSpecName: "kube-api-access-mwb75") pod "2ea88fc7-43b7-4b93-b9f0-e6833868a1cf" (UID: "2ea88fc7-43b7-4b93-b9f0-e6833868a1cf"). InnerVolumeSpecName "kube-api-access-mwb75". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.319085 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f09451c-0497-468a-a555-df590ba4e739-kube-api-access-mtdbs" (OuterVolumeSpecName: "kube-api-access-mtdbs") pod "4f09451c-0497-468a-a555-df590ba4e739" (UID: "4f09451c-0497-468a-a555-df590ba4e739"). InnerVolumeSpecName "kube-api-access-mtdbs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.319576 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d06df4fc-ddce-4c76-b612-6e94abc94c9d-kube-api-access-cn62p" (OuterVolumeSpecName: "kube-api-access-cn62p") pod "d06df4fc-ddce-4c76-b612-6e94abc94c9d" (UID: "d06df4fc-ddce-4c76-b612-6e94abc94c9d"). InnerVolumeSpecName "kube-api-access-cn62p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.414035 4631 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d06df4fc-ddce-4c76-b612-6e94abc94c9d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.414280 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mtdbs\" (UniqueName: \"kubernetes.io/projected/4f09451c-0497-468a-a555-df590ba4e739-kube-api-access-mtdbs\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.414348 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zjhsl\" (UniqueName: \"kubernetes.io/projected/3cd3284d-eef1-46b8-a5eb-88c0a1772c61-kube-api-access-zjhsl\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.414463 4631 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d740232-221b-4667-bb8e-995d626b74ce-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.414529 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8mqhk\" (UniqueName: \"kubernetes.io/projected/9b827db3-b1ee-4b2d-8e57-807fc9449549-kube-api-access-8mqhk\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.414595 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l25sw\" (UniqueName: \"kubernetes.io/projected/2d740232-221b-4667-bb8e-995d626b74ce-kube-api-access-l25sw\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.414655 4631 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9b827db3-b1ee-4b2d-8e57-807fc9449549-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.414915 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mwb75\" (UniqueName: 
\"kubernetes.io/projected/2ea88fc7-43b7-4b93-b9f0-e6833868a1cf-kube-api-access-mwb75\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.414982 4631 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3cd3284d-eef1-46b8-a5eb-88c0a1772c61-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.415042 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cn62p\" (UniqueName: \"kubernetes.io/projected/d06df4fc-ddce-4c76-b612-6e94abc94c9d-kube-api-access-cn62p\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.415109 4631 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f09451c-0497-468a-a555-df590ba4e739-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.415172 4631 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ea88fc7-43b7-4b93-b9f0-e6833868a1cf-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.439972 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-6029-account-create-update-hx8lr" event={"ID":"4f09451c-0497-468a-a555-df590ba4e739","Type":"ContainerDied","Data":"422c00d04f73a6a7b4e10bfeaa2af16fb37ab6a5fde5c42a9fa960e7f7c80bd3"} Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.440033 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="422c00d04f73a6a7b4e10bfeaa2af16fb37ab6a5fde5c42a9fa960e7f7c80bd3" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.440116 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-6029-account-create-update-hx8lr" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.453848 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-927b-account-create-update-7jqb5" event={"ID":"2d740232-221b-4667-bb8e-995d626b74ce","Type":"ContainerDied","Data":"31338ba979f9dfc7fb5635f01539af012c08409d9f314b959a72affe13f8de82"} Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.453905 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="31338ba979f9dfc7fb5635f01539af012c08409d9f314b959a72affe13f8de82" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.453975 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-927b-account-create-update-7jqb5" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.461490 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-x45nc" event={"ID":"9b827db3-b1ee-4b2d-8e57-807fc9449549","Type":"ContainerDied","Data":"84a53d2358866762e351db694f6a708319b5fceba08e71f11a9bbe01022337d7"} Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.461521 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84a53d2358866762e351db694f6a708319b5fceba08e71f11a9bbe01022337d7" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.461565 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-x45nc" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.465235 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-gg9j7" event={"ID":"3cd3284d-eef1-46b8-a5eb-88c0a1772c61","Type":"ContainerDied","Data":"89b2cc44fa6235d9a860531d4694ee8a74ddeba41171259159db6351d137518a"} Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.465280 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="89b2cc44fa6235d9a860531d4694ee8a74ddeba41171259159db6351d137518a" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.465354 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-gg9j7" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.467526 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qgrwj" event={"ID":"8f5756a6-d59c-4653-ac3a-8ba1fd91862b","Type":"ContainerStarted","Data":"a9ef860e5552eed3443a89382124feae6c17a86b1237152806641bd2cd1f0107"} Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.470082 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-wgt4k" event={"ID":"d06df4fc-ddce-4c76-b612-6e94abc94c9d","Type":"ContainerDied","Data":"a8b8671094f9c1d66990dd84edfca9b4fbce60f80391bdb1d193052a0ab18264"} Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.470123 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a8b8671094f9c1d66990dd84edfca9b4fbce60f80391bdb1d193052a0ab18264" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.470196 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-wgt4k" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.471804 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-3f0f-account-create-update-8ptr9" event={"ID":"2ea88fc7-43b7-4b93-b9f0-e6833868a1cf","Type":"ContainerDied","Data":"14ac9cd4fb1ebac066a10f30a1ee7b0992384cd687fa020faf277fe8d2c289b4"} Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.471850 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14ac9cd4fb1ebac066a10f30a1ee7b0992384cd687fa020faf277fe8d2c289b4" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.471954 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-3f0f-account-create-update-8ptr9" Dec 04 17:49:36 crc kubenswrapper[4631]: I1204 17:49:36.507235 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-qgrwj" podStartSLOduration=10.206658605 podStartE2EDuration="14.507204745s" podCreationTimestamp="2025-12-04 17:49:22 +0000 UTC" firstStartedPulling="2025-12-04 17:49:31.755932774 +0000 UTC m=+1301.788174772" lastFinishedPulling="2025-12-04 17:49:36.056478914 +0000 UTC m=+1306.088720912" observedRunningTime="2025-12-04 17:49:36.486996508 +0000 UTC m=+1306.519238516" watchObservedRunningTime="2025-12-04 17:49:36.507204745 +0000 UTC m=+1306.539446763" Dec 04 17:49:40 crc kubenswrapper[4631]: I1204 17:49:40.506918 4631 generic.go:334] "Generic (PLEG): container finished" podID="8f5756a6-d59c-4653-ac3a-8ba1fd91862b" containerID="a9ef860e5552eed3443a89382124feae6c17a86b1237152806641bd2cd1f0107" exitCode=0 Dec 04 17:49:40 crc kubenswrapper[4631]: I1204 17:49:40.507121 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qgrwj" event={"ID":"8f5756a6-d59c-4653-ac3a-8ba1fd91862b","Type":"ContainerDied","Data":"a9ef860e5552eed3443a89382124feae6c17a86b1237152806641bd2cd1f0107"} Dec 04 17:49:41 crc kubenswrapper[4631]: I1204 17:49:41.838908 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-qgrwj" Dec 04 17:49:41 crc kubenswrapper[4631]: I1204 17:49:41.903673 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f5756a6-d59c-4653-ac3a-8ba1fd91862b-config-data\") pod \"8f5756a6-d59c-4653-ac3a-8ba1fd91862b\" (UID: \"8f5756a6-d59c-4653-ac3a-8ba1fd91862b\") " Dec 04 17:49:41 crc kubenswrapper[4631]: I1204 17:49:41.903797 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcg5k\" (UniqueName: \"kubernetes.io/projected/8f5756a6-d59c-4653-ac3a-8ba1fd91862b-kube-api-access-tcg5k\") pod \"8f5756a6-d59c-4653-ac3a-8ba1fd91862b\" (UID: \"8f5756a6-d59c-4653-ac3a-8ba1fd91862b\") " Dec 04 17:49:41 crc kubenswrapper[4631]: I1204 17:49:41.903824 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5756a6-d59c-4653-ac3a-8ba1fd91862b-combined-ca-bundle\") pod \"8f5756a6-d59c-4653-ac3a-8ba1fd91862b\" (UID: \"8f5756a6-d59c-4653-ac3a-8ba1fd91862b\") " Dec 04 17:49:41 crc kubenswrapper[4631]: I1204 17:49:41.911669 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f5756a6-d59c-4653-ac3a-8ba1fd91862b-kube-api-access-tcg5k" (OuterVolumeSpecName: "kube-api-access-tcg5k") pod "8f5756a6-d59c-4653-ac3a-8ba1fd91862b" (UID: "8f5756a6-d59c-4653-ac3a-8ba1fd91862b"). InnerVolumeSpecName "kube-api-access-tcg5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:49:41 crc kubenswrapper[4631]: I1204 17:49:41.936336 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f5756a6-d59c-4653-ac3a-8ba1fd91862b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8f5756a6-d59c-4653-ac3a-8ba1fd91862b" (UID: "8f5756a6-d59c-4653-ac3a-8ba1fd91862b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:49:41 crc kubenswrapper[4631]: I1204 17:49:41.991882 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f5756a6-d59c-4653-ac3a-8ba1fd91862b-config-data" (OuterVolumeSpecName: "config-data") pod "8f5756a6-d59c-4653-ac3a-8ba1fd91862b" (UID: "8f5756a6-d59c-4653-ac3a-8ba1fd91862b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.011009 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f5756a6-d59c-4653-ac3a-8ba1fd91862b-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.011058 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcg5k\" (UniqueName: \"kubernetes.io/projected/8f5756a6-d59c-4653-ac3a-8ba1fd91862b-kube-api-access-tcg5k\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.011091 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5756a6-d59c-4653-ac3a-8ba1fd91862b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.524433 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qgrwj" event={"ID":"8f5756a6-d59c-4653-ac3a-8ba1fd91862b","Type":"ContainerDied","Data":"7fd080c6b890ea8ca7541ff045f02ea62878b1877826cd167692b0e71d771657"} Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.524474 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7fd080c6b890ea8ca7541ff045f02ea62878b1877826cd167692b0e71d771657" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.524557 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-qgrwj" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.832435 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-xhwzb"] Dec 04 17:49:42 crc kubenswrapper[4631]: E1204 17:49:42.833123 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b28b17b-31f2-49c7-b089-dfd0275bc8ba" containerName="init" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.833140 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b28b17b-31f2-49c7-b089-dfd0275bc8ba" containerName="init" Dec 04 17:49:42 crc kubenswrapper[4631]: E1204 17:49:42.833158 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f09451c-0497-468a-a555-df590ba4e739" containerName="mariadb-account-create-update" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.833167 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f09451c-0497-468a-a555-df590ba4e739" containerName="mariadb-account-create-update" Dec 04 17:49:42 crc kubenswrapper[4631]: E1204 17:49:42.833183 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b28b17b-31f2-49c7-b089-dfd0275bc8ba" containerName="dnsmasq-dns" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.833192 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b28b17b-31f2-49c7-b089-dfd0275bc8ba" containerName="dnsmasq-dns" Dec 04 17:49:42 crc kubenswrapper[4631]: E1204 17:49:42.833203 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d740232-221b-4667-bb8e-995d626b74ce" containerName="mariadb-account-create-update" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.833211 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d740232-221b-4667-bb8e-995d626b74ce" containerName="mariadb-account-create-update" Dec 04 17:49:42 crc kubenswrapper[4631]: E1204 17:49:42.833221 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d06df4fc-ddce-4c76-b612-6e94abc94c9d" containerName="mariadb-database-create" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.833228 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="d06df4fc-ddce-4c76-b612-6e94abc94c9d" containerName="mariadb-database-create" Dec 04 17:49:42 crc kubenswrapper[4631]: E1204 17:49:42.833245 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f5756a6-d59c-4653-ac3a-8ba1fd91862b" containerName="keystone-db-sync" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.833253 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f5756a6-d59c-4653-ac3a-8ba1fd91862b" containerName="keystone-db-sync" Dec 04 17:49:42 crc kubenswrapper[4631]: E1204 17:49:42.833264 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b827db3-b1ee-4b2d-8e57-807fc9449549" containerName="mariadb-database-create" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.833272 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b827db3-b1ee-4b2d-8e57-807fc9449549" containerName="mariadb-database-create" Dec 04 17:49:42 crc kubenswrapper[4631]: E1204 17:49:42.833322 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ea88fc7-43b7-4b93-b9f0-e6833868a1cf" containerName="mariadb-account-create-update" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.833329 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ea88fc7-43b7-4b93-b9f0-e6833868a1cf" containerName="mariadb-account-create-update" Dec 04 17:49:42 crc kubenswrapper[4631]: E1204 17:49:42.833346 4631 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cd3284d-eef1-46b8-a5eb-88c0a1772c61" containerName="mariadb-database-create" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.833354 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cd3284d-eef1-46b8-a5eb-88c0a1772c61" containerName="mariadb-database-create" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.833588 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f09451c-0497-468a-a555-df590ba4e739" containerName="mariadb-account-create-update" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.833613 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="d06df4fc-ddce-4c76-b612-6e94abc94c9d" containerName="mariadb-database-create" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.833624 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d740232-221b-4667-bb8e-995d626b74ce" containerName="mariadb-account-create-update" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.833636 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cd3284d-eef1-46b8-a5eb-88c0a1772c61" containerName="mariadb-database-create" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.833650 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ea88fc7-43b7-4b93-b9f0-e6833868a1cf" containerName="mariadb-account-create-update" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.833663 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b827db3-b1ee-4b2d-8e57-807fc9449549" containerName="mariadb-database-create" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.833676 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f5756a6-d59c-4653-ac3a-8ba1fd91862b" containerName="keystone-db-sync" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.833686 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b28b17b-31f2-49c7-b089-dfd0275bc8ba" containerName="dnsmasq-dns" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.834334 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-xhwzb" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.839966 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.841619 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.841841 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-sxcqt" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.841973 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.842165 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.848779 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-xhwzb"] Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.880512 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-sqxl8"] Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.882141 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.927295 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-fernet-keys\") pod \"keystone-bootstrap-xhwzb\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") " pod="openstack/keystone-bootstrap-xhwzb" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.927390 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-config-data\") pod \"keystone-bootstrap-xhwzb\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") " pod="openstack/keystone-bootstrap-xhwzb" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.927458 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-combined-ca-bundle\") pod \"keystone-bootstrap-xhwzb\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") " pod="openstack/keystone-bootstrap-xhwzb" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.927551 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-credential-keys\") pod \"keystone-bootstrap-xhwzb\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") " pod="openstack/keystone-bootstrap-xhwzb" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.927586 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-scripts\") pod \"keystone-bootstrap-xhwzb\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") " pod="openstack/keystone-bootstrap-xhwzb" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.927612 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8frtm\" (UniqueName: \"kubernetes.io/projected/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-kube-api-access-8frtm\") pod \"keystone-bootstrap-xhwzb\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") " pod="openstack/keystone-bootstrap-xhwzb" Dec 04 17:49:42 crc kubenswrapper[4631]: I1204 17:49:42.970192 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-sqxl8"] Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.029404 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-credential-keys\") pod \"keystone-bootstrap-xhwzb\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") " pod="openstack/keystone-bootstrap-xhwzb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.029440 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-scripts\") pod \"keystone-bootstrap-xhwzb\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") " pod="openstack/keystone-bootstrap-xhwzb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.029462 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8frtm\" (UniqueName: 
\"kubernetes.io/projected/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-kube-api-access-8frtm\") pod \"keystone-bootstrap-xhwzb\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") " pod="openstack/keystone-bootstrap-xhwzb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.029492 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-fernet-keys\") pod \"keystone-bootstrap-xhwzb\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") " pod="openstack/keystone-bootstrap-xhwzb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.029527 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-config-data\") pod \"keystone-bootstrap-xhwzb\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") " pod="openstack/keystone-bootstrap-xhwzb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.029552 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-dns-swift-storage-0\") pod \"dnsmasq-dns-55fff446b9-sqxl8\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.029571 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-config\") pod \"dnsmasq-dns-55fff446b9-sqxl8\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.029587 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vcrw\" (UniqueName: \"kubernetes.io/projected/046ef41f-6e70-47d1-b9df-9884366cf4da-kube-api-access-4vcrw\") pod \"dnsmasq-dns-55fff446b9-sqxl8\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.029628 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-combined-ca-bundle\") pod \"keystone-bootstrap-xhwzb\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") " pod="openstack/keystone-bootstrap-xhwzb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.029670 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-dns-svc\") pod \"dnsmasq-dns-55fff446b9-sqxl8\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.029698 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-ovsdbserver-nb\") pod \"dnsmasq-dns-55fff446b9-sqxl8\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.029721 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-ovsdbserver-sb\") pod \"dnsmasq-dns-55fff446b9-sqxl8\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.042084 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-fernet-keys\") pod \"keystone-bootstrap-xhwzb\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") " pod="openstack/keystone-bootstrap-xhwzb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.044445 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-config-data\") pod \"keystone-bootstrap-xhwzb\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") " pod="openstack/keystone-bootstrap-xhwzb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.050870 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-credential-keys\") pod \"keystone-bootstrap-xhwzb\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") " pod="openstack/keystone-bootstrap-xhwzb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.051231 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-combined-ca-bundle\") pod \"keystone-bootstrap-xhwzb\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") " pod="openstack/keystone-bootstrap-xhwzb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.062309 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-49bhb"] Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.070276 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-49bhb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.081477 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.081712 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.081840 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-f5vb9" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.083077 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-scripts\") pod \"keystone-bootstrap-xhwzb\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") " pod="openstack/keystone-bootstrap-xhwzb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.120436 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-49bhb"] Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.124965 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8frtm\" (UniqueName: \"kubernetes.io/projected/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-kube-api-access-8frtm\") pod \"keystone-bootstrap-xhwzb\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") " pod="openstack/keystone-bootstrap-xhwzb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.132965 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-scripts\") pod \"cinder-db-sync-49bhb\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " pod="openstack/cinder-db-sync-49bhb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.133044 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-dns-svc\") pod \"dnsmasq-dns-55fff446b9-sqxl8\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.133073 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-ovsdbserver-nb\") pod \"dnsmasq-dns-55fff446b9-sqxl8\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.133098 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-ovsdbserver-sb\") pod \"dnsmasq-dns-55fff446b9-sqxl8\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.133125 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqkdh\" (UniqueName: \"kubernetes.io/projected/caa9015d-d530-4caa-8a24-2338d69519a3-kube-api-access-hqkdh\") pod \"cinder-db-sync-49bhb\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " pod="openstack/cinder-db-sync-49bhb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.133161 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/caa9015d-d530-4caa-8a24-2338d69519a3-etc-machine-id\") pod \"cinder-db-sync-49bhb\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " pod="openstack/cinder-db-sync-49bhb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.133179 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-config-data\") pod \"cinder-db-sync-49bhb\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " pod="openstack/cinder-db-sync-49bhb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.133193 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-combined-ca-bundle\") pod \"cinder-db-sync-49bhb\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " pod="openstack/cinder-db-sync-49bhb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.133214 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-dns-swift-storage-0\") pod \"dnsmasq-dns-55fff446b9-sqxl8\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.133234 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-config\") pod \"dnsmasq-dns-55fff446b9-sqxl8\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.133250 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vcrw\" (UniqueName: \"kubernetes.io/projected/046ef41f-6e70-47d1-b9df-9884366cf4da-kube-api-access-4vcrw\") pod \"dnsmasq-dns-55fff446b9-sqxl8\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.133272 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-db-sync-config-data\") pod \"cinder-db-sync-49bhb\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " pod="openstack/cinder-db-sync-49bhb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.134165 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-dns-svc\") pod \"dnsmasq-dns-55fff446b9-sqxl8\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.134743 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-ovsdbserver-nb\") pod \"dnsmasq-dns-55fff446b9-sqxl8\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.135261 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-ovsdbserver-sb\") pod \"dnsmasq-dns-55fff446b9-sqxl8\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.135782 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-dns-swift-storage-0\") pod \"dnsmasq-dns-55fff446b9-sqxl8\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.137543 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-5dhm2"] Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.138599 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-5dhm2" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.140404 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.142037 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-6vns7" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.142230 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.151135 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-config\") pod \"dnsmasq-dns-55fff446b9-sqxl8\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.168194 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-xhwzb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.180201 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-57cbdfdf9-fpjrr"] Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.182157 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-57cbdfdf9-fpjrr" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.192111 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-6s2vv" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.197657 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.197838 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.197954 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.210414 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-5dhm2"] Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.210958 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vcrw\" (UniqueName: \"kubernetes.io/projected/046ef41f-6e70-47d1-b9df-9884366cf4da-kube-api-access-4vcrw\") pod \"dnsmasq-dns-55fff446b9-sqxl8\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.221824 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.235183 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-scripts\") pod \"cinder-db-sync-49bhb\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " pod="openstack/cinder-db-sync-49bhb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.235234 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3aa97a3-43fb-4478-bd4e-e06494e42efd-combined-ca-bundle\") pod \"neutron-db-sync-5dhm2\" (UID: \"d3aa97a3-43fb-4478-bd4e-e06494e42efd\") " pod="openstack/neutron-db-sync-5dhm2" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.235272 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q78vw\" (UniqueName: \"kubernetes.io/projected/d3aa97a3-43fb-4478-bd4e-e06494e42efd-kube-api-access-q78vw\") pod \"neutron-db-sync-5dhm2\" (UID: \"d3aa97a3-43fb-4478-bd4e-e06494e42efd\") " pod="openstack/neutron-db-sync-5dhm2" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.235294 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d3aa97a3-43fb-4478-bd4e-e06494e42efd-config\") pod \"neutron-db-sync-5dhm2\" (UID: \"d3aa97a3-43fb-4478-bd4e-e06494e42efd\") " pod="openstack/neutron-db-sync-5dhm2" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.235346 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqkdh\" (UniqueName: \"kubernetes.io/projected/caa9015d-d530-4caa-8a24-2338d69519a3-kube-api-access-hqkdh\") pod \"cinder-db-sync-49bhb\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " pod="openstack/cinder-db-sync-49bhb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.235509 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" 
(UniqueName: \"kubernetes.io/host-path/caa9015d-d530-4caa-8a24-2338d69519a3-etc-machine-id\") pod \"cinder-db-sync-49bhb\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " pod="openstack/cinder-db-sync-49bhb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.235532 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-config-data\") pod \"cinder-db-sync-49bhb\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " pod="openstack/cinder-db-sync-49bhb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.235556 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-combined-ca-bundle\") pod \"cinder-db-sync-49bhb\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " pod="openstack/cinder-db-sync-49bhb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.235586 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-db-sync-config-data\") pod \"cinder-db-sync-49bhb\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " pod="openstack/cinder-db-sync-49bhb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.238817 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/caa9015d-d530-4caa-8a24-2338d69519a3-etc-machine-id\") pod \"cinder-db-sync-49bhb\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " pod="openstack/cinder-db-sync-49bhb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.241659 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-57cbdfdf9-fpjrr"] Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.243487 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-config-data\") pod \"cinder-db-sync-49bhb\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " pod="openstack/cinder-db-sync-49bhb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.244003 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-scripts\") pod \"cinder-db-sync-49bhb\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " pod="openstack/cinder-db-sync-49bhb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.254894 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-db-sync-config-data\") pod \"cinder-db-sync-49bhb\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " pod="openstack/cinder-db-sync-49bhb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.282484 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-combined-ca-bundle\") pod \"cinder-db-sync-49bhb\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " pod="openstack/cinder-db-sync-49bhb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.309524 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqkdh\" (UniqueName: 
\"kubernetes.io/projected/caa9015d-d530-4caa-8a24-2338d69519a3-kube-api-access-hqkdh\") pod \"cinder-db-sync-49bhb\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " pod="openstack/cinder-db-sync-49bhb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.337417 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/912d634b-beda-4d89-8dfa-ca97a1d3f54e-horizon-secret-key\") pod \"horizon-57cbdfdf9-fpjrr\" (UID: \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\") " pod="openstack/horizon-57cbdfdf9-fpjrr" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.337461 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/912d634b-beda-4d89-8dfa-ca97a1d3f54e-scripts\") pod \"horizon-57cbdfdf9-fpjrr\" (UID: \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\") " pod="openstack/horizon-57cbdfdf9-fpjrr" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.337500 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/912d634b-beda-4d89-8dfa-ca97a1d3f54e-logs\") pod \"horizon-57cbdfdf9-fpjrr\" (UID: \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\") " pod="openstack/horizon-57cbdfdf9-fpjrr" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.337569 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3aa97a3-43fb-4478-bd4e-e06494e42efd-combined-ca-bundle\") pod \"neutron-db-sync-5dhm2\" (UID: \"d3aa97a3-43fb-4478-bd4e-e06494e42efd\") " pod="openstack/neutron-db-sync-5dhm2" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.337597 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2hvj\" (UniqueName: \"kubernetes.io/projected/912d634b-beda-4d89-8dfa-ca97a1d3f54e-kube-api-access-t2hvj\") pod \"horizon-57cbdfdf9-fpjrr\" (UID: \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\") " pod="openstack/horizon-57cbdfdf9-fpjrr" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.337630 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/912d634b-beda-4d89-8dfa-ca97a1d3f54e-config-data\") pod \"horizon-57cbdfdf9-fpjrr\" (UID: \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\") " pod="openstack/horizon-57cbdfdf9-fpjrr" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.337648 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q78vw\" (UniqueName: \"kubernetes.io/projected/d3aa97a3-43fb-4478-bd4e-e06494e42efd-kube-api-access-q78vw\") pod \"neutron-db-sync-5dhm2\" (UID: \"d3aa97a3-43fb-4478-bd4e-e06494e42efd\") " pod="openstack/neutron-db-sync-5dhm2" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.337669 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d3aa97a3-43fb-4478-bd4e-e06494e42efd-config\") pod \"neutron-db-sync-5dhm2\" (UID: \"d3aa97a3-43fb-4478-bd4e-e06494e42efd\") " pod="openstack/neutron-db-sync-5dhm2" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.344632 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.348960 4631 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3aa97a3-43fb-4478-bd4e-e06494e42efd-combined-ca-bundle\") pod \"neutron-db-sync-5dhm2\" (UID: \"d3aa97a3-43fb-4478-bd4e-e06494e42efd\") " pod="openstack/neutron-db-sync-5dhm2" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.349257 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/d3aa97a3-43fb-4478-bd4e-e06494e42efd-config\") pod \"neutron-db-sync-5dhm2\" (UID: \"d3aa97a3-43fb-4478-bd4e-e06494e42efd\") " pod="openstack/neutron-db-sync-5dhm2" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.361711 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.376805 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.377381 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.414591 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q78vw\" (UniqueName: \"kubernetes.io/projected/d3aa97a3-43fb-4478-bd4e-e06494e42efd-kube-api-access-q78vw\") pod \"neutron-db-sync-5dhm2\" (UID: \"d3aa97a3-43fb-4478-bd4e-e06494e42efd\") " pod="openstack/neutron-db-sync-5dhm2" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.431028 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.439012 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-scripts\") pod \"ceilometer-0\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " pod="openstack/ceilometer-0" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.439084 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/912d634b-beda-4d89-8dfa-ca97a1d3f54e-horizon-secret-key\") pod \"horizon-57cbdfdf9-fpjrr\" (UID: \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\") " pod="openstack/horizon-57cbdfdf9-fpjrr" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.439109 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-log-httpd\") pod \"ceilometer-0\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " pod="openstack/ceilometer-0" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.439143 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/912d634b-beda-4d89-8dfa-ca97a1d3f54e-scripts\") pod \"horizon-57cbdfdf9-fpjrr\" (UID: \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\") " pod="openstack/horizon-57cbdfdf9-fpjrr" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.439166 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xbdd\" (UniqueName: \"kubernetes.io/projected/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-kube-api-access-8xbdd\") pod \"ceilometer-0\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " pod="openstack/ceilometer-0" Dec 04 17:49:43 crc 
kubenswrapper[4631]: I1204 17:49:43.439183 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/912d634b-beda-4d89-8dfa-ca97a1d3f54e-logs\") pod \"horizon-57cbdfdf9-fpjrr\" (UID: \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\") " pod="openstack/horizon-57cbdfdf9-fpjrr" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.439216 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " pod="openstack/ceilometer-0" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.439240 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-config-data\") pod \"ceilometer-0\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " pod="openstack/ceilometer-0" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.439257 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-run-httpd\") pod \"ceilometer-0\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " pod="openstack/ceilometer-0" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.439297 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " pod="openstack/ceilometer-0" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.439358 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2hvj\" (UniqueName: \"kubernetes.io/projected/912d634b-beda-4d89-8dfa-ca97a1d3f54e-kube-api-access-t2hvj\") pod \"horizon-57cbdfdf9-fpjrr\" (UID: \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\") " pod="openstack/horizon-57cbdfdf9-fpjrr" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.439394 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/912d634b-beda-4d89-8dfa-ca97a1d3f54e-config-data\") pod \"horizon-57cbdfdf9-fpjrr\" (UID: \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\") " pod="openstack/horizon-57cbdfdf9-fpjrr" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.447548 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/912d634b-beda-4d89-8dfa-ca97a1d3f54e-scripts\") pod \"horizon-57cbdfdf9-fpjrr\" (UID: \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\") " pod="openstack/horizon-57cbdfdf9-fpjrr" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.447777 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/912d634b-beda-4d89-8dfa-ca97a1d3f54e-logs\") pod \"horizon-57cbdfdf9-fpjrr\" (UID: \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\") " pod="openstack/horizon-57cbdfdf9-fpjrr" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.448391 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-tf4br"] Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.449422 4631 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-tf4br" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.466710 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/912d634b-beda-4d89-8dfa-ca97a1d3f54e-config-data\") pod \"horizon-57cbdfdf9-fpjrr\" (UID: \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\") " pod="openstack/horizon-57cbdfdf9-fpjrr" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.468327 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/912d634b-beda-4d89-8dfa-ca97a1d3f54e-horizon-secret-key\") pod \"horizon-57cbdfdf9-fpjrr\" (UID: \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\") " pod="openstack/horizon-57cbdfdf9-fpjrr" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.468866 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.473737 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.473991 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-9gz46" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.501386 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2hvj\" (UniqueName: \"kubernetes.io/projected/912d634b-beda-4d89-8dfa-ca97a1d3f54e-kube-api-access-t2hvj\") pod \"horizon-57cbdfdf9-fpjrr\" (UID: \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\") " pod="openstack/horizon-57cbdfdf9-fpjrr" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.523436 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-tf4br"] Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.524235 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-49bhb" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.542117 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xbdd\" (UniqueName: \"kubernetes.io/projected/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-kube-api-access-8xbdd\") pod \"ceilometer-0\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " pod="openstack/ceilometer-0" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.542198 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " pod="openstack/ceilometer-0" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.542264 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-config-data\") pod \"ceilometer-0\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " pod="openstack/ceilometer-0" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.542301 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-run-httpd\") pod \"ceilometer-0\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " pod="openstack/ceilometer-0" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.542336 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " pod="openstack/ceilometer-0" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.542617 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-config-data\") pod \"placement-db-sync-tf4br\" (UID: \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\") " pod="openstack/placement-db-sync-tf4br" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.542679 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-scripts\") pod \"ceilometer-0\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " pod="openstack/ceilometer-0" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.542749 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-scripts\") pod \"placement-db-sync-tf4br\" (UID: \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\") " pod="openstack/placement-db-sync-tf4br" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.542790 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pkmh\" (UniqueName: \"kubernetes.io/projected/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-kube-api-access-5pkmh\") pod \"placement-db-sync-tf4br\" (UID: \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\") " pod="openstack/placement-db-sync-tf4br" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.542842 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-log-httpd\") pod \"ceilometer-0\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " pod="openstack/ceilometer-0" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.542881 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-logs\") pod \"placement-db-sync-tf4br\" (UID: \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\") " pod="openstack/placement-db-sync-tf4br" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.542906 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-combined-ca-bundle\") pod \"placement-db-sync-tf4br\" (UID: \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\") " pod="openstack/placement-db-sync-tf4br" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.554407 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-run-httpd\") pod \"ceilometer-0\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " pod="openstack/ceilometer-0" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.569105 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-scripts\") pod \"ceilometer-0\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " pod="openstack/ceilometer-0" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.573269 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " pod="openstack/ceilometer-0" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.577468 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-config-data\") pod \"ceilometer-0\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " pod="openstack/ceilometer-0" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.577541 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-sqxl8"] Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.579951 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-log-httpd\") pod \"ceilometer-0\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " pod="openstack/ceilometer-0" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.580495 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " pod="openstack/ceilometer-0" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.590181 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xbdd\" (UniqueName: \"kubernetes.io/projected/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-kube-api-access-8xbdd\") pod \"ceilometer-0\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " pod="openstack/ceilometer-0" Dec 04 
17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.631239 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-57cbdfdf9-fpjrr" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.632669 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-5dhm2" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.646420 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-logs\") pod \"placement-db-sync-tf4br\" (UID: \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\") " pod="openstack/placement-db-sync-tf4br" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.646749 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-combined-ca-bundle\") pod \"placement-db-sync-tf4br\" (UID: \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\") " pod="openstack/placement-db-sync-tf4br" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.646872 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-config-data\") pod \"placement-db-sync-tf4br\" (UID: \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\") " pod="openstack/placement-db-sync-tf4br" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.646914 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-scripts\") pod \"placement-db-sync-tf4br\" (UID: \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\") " pod="openstack/placement-db-sync-tf4br" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.646941 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pkmh\" (UniqueName: \"kubernetes.io/projected/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-kube-api-access-5pkmh\") pod \"placement-db-sync-tf4br\" (UID: \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\") " pod="openstack/placement-db-sync-tf4br" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.646976 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-logs\") pod \"placement-db-sync-tf4br\" (UID: \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\") " pod="openstack/placement-db-sync-tf4br" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.658092 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-combined-ca-bundle\") pod \"placement-db-sync-tf4br\" (UID: \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\") " pod="openstack/placement-db-sync-tf4br" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.661743 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-scripts\") pod \"placement-db-sync-tf4br\" (UID: \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\") " pod="openstack/placement-db-sync-tf4br" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.663473 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-config-data\") pod 
\"placement-db-sync-tf4br\" (UID: \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\") " pod="openstack/placement-db-sync-tf4br" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.672477 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-jbcc2"] Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.673913 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-jbcc2" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.677562 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pkmh\" (UniqueName: \"kubernetes.io/projected/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-kube-api-access-5pkmh\") pod \"placement-db-sync-tf4br\" (UID: \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\") " pod="openstack/placement-db-sync-tf4br" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.681002 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.681198 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-8nlq9" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.719619 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-jbcc2"] Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.736615 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-h4pr5"] Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.739298 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.740796 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.750800 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dt5fr\" (UniqueName: \"kubernetes.io/projected/d0aff05c-75cd-495a-903e-83b72596bf86-kube-api-access-dt5fr\") pod \"barbican-db-sync-jbcc2\" (UID: \"d0aff05c-75cd-495a-903e-83b72596bf86\") " pod="openstack/barbican-db-sync-jbcc2" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.750869 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-dns-svc\") pod \"dnsmasq-dns-76fcf4b695-h4pr5\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.750937 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-ovsdbserver-sb\") pod \"dnsmasq-dns-76fcf4b695-h4pr5\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.751125 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0aff05c-75cd-495a-903e-83b72596bf86-combined-ca-bundle\") pod \"barbican-db-sync-jbcc2\" (UID: \"d0aff05c-75cd-495a-903e-83b72596bf86\") " pod="openstack/barbican-db-sync-jbcc2" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.751204 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-dns-swift-storage-0\") pod \"dnsmasq-dns-76fcf4b695-h4pr5\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.751232 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-config\") pod \"dnsmasq-dns-76fcf4b695-h4pr5\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.751263 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zlgh\" (UniqueName: \"kubernetes.io/projected/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-kube-api-access-2zlgh\") pod \"dnsmasq-dns-76fcf4b695-h4pr5\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.751310 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d0aff05c-75cd-495a-903e-83b72596bf86-db-sync-config-data\") pod \"barbican-db-sync-jbcc2\" (UID: \"d0aff05c-75cd-495a-903e-83b72596bf86\") " pod="openstack/barbican-db-sync-jbcc2" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.751407 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-ovsdbserver-nb\") pod \"dnsmasq-dns-76fcf4b695-h4pr5\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.779990 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6d7dd8f89f-qsbxc"] Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.786185 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6d7dd8f89f-qsbxc" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.792146 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6d7dd8f89f-qsbxc"] Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.806564 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-h4pr5"] Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.825502 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-tf4br" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.852675 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/56ad69c1-bd7a-4147-9a1b-3119818cbe62-config-data\") pod \"horizon-6d7dd8f89f-qsbxc\" (UID: \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\") " pod="openstack/horizon-6d7dd8f89f-qsbxc" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.852725 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-ovsdbserver-sb\") pod \"dnsmasq-dns-76fcf4b695-h4pr5\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.852757 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/56ad69c1-bd7a-4147-9a1b-3119818cbe62-scripts\") pod \"horizon-6d7dd8f89f-qsbxc\" (UID: \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\") " pod="openstack/horizon-6d7dd8f89f-qsbxc" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.852781 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0aff05c-75cd-495a-903e-83b72596bf86-combined-ca-bundle\") pod \"barbican-db-sync-jbcc2\" (UID: \"d0aff05c-75cd-495a-903e-83b72596bf86\") " pod="openstack/barbican-db-sync-jbcc2" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.852801 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/56ad69c1-bd7a-4147-9a1b-3119818cbe62-horizon-secret-key\") pod \"horizon-6d7dd8f89f-qsbxc\" (UID: \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\") " pod="openstack/horizon-6d7dd8f89f-qsbxc" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.852825 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-dns-swift-storage-0\") pod \"dnsmasq-dns-76fcf4b695-h4pr5\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.852851 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-config\") pod \"dnsmasq-dns-76fcf4b695-h4pr5\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.852871 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zlgh\" (UniqueName: \"kubernetes.io/projected/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-kube-api-access-2zlgh\") pod \"dnsmasq-dns-76fcf4b695-h4pr5\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.852895 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cd27b\" (UniqueName: \"kubernetes.io/projected/56ad69c1-bd7a-4147-9a1b-3119818cbe62-kube-api-access-cd27b\") pod \"horizon-6d7dd8f89f-qsbxc\" (UID: \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\") " pod="openstack/horizon-6d7dd8f89f-qsbxc" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.852915 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d0aff05c-75cd-495a-903e-83b72596bf86-db-sync-config-data\") pod \"barbican-db-sync-jbcc2\" (UID: \"d0aff05c-75cd-495a-903e-83b72596bf86\") " pod="openstack/barbican-db-sync-jbcc2" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.852949 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-ovsdbserver-nb\") pod \"dnsmasq-dns-76fcf4b695-h4pr5\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.852968 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56ad69c1-bd7a-4147-9a1b-3119818cbe62-logs\") pod \"horizon-6d7dd8f89f-qsbxc\" (UID: \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\") " pod="openstack/horizon-6d7dd8f89f-qsbxc" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.852998 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-dns-svc\") pod \"dnsmasq-dns-76fcf4b695-h4pr5\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.853013 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dt5fr\" (UniqueName: \"kubernetes.io/projected/d0aff05c-75cd-495a-903e-83b72596bf86-kube-api-access-dt5fr\") pod \"barbican-db-sync-jbcc2\" (UID: \"d0aff05c-75cd-495a-903e-83b72596bf86\") " pod="openstack/barbican-db-sync-jbcc2" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.854321 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-ovsdbserver-sb\") pod \"dnsmasq-dns-76fcf4b695-h4pr5\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.857598 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-dns-swift-storage-0\") pod \"dnsmasq-dns-76fcf4b695-h4pr5\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.858095 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-config\") pod \"dnsmasq-dns-76fcf4b695-h4pr5\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.858872 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-ovsdbserver-nb\") pod \"dnsmasq-dns-76fcf4b695-h4pr5\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.861039 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-dns-svc\") pod \"dnsmasq-dns-76fcf4b695-h4pr5\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.863783 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0aff05c-75cd-495a-903e-83b72596bf86-combined-ca-bundle\") pod \"barbican-db-sync-jbcc2\" (UID: \"d0aff05c-75cd-495a-903e-83b72596bf86\") " pod="openstack/barbican-db-sync-jbcc2" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.873302 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d0aff05c-75cd-495a-903e-83b72596bf86-db-sync-config-data\") pod \"barbican-db-sync-jbcc2\" (UID: \"d0aff05c-75cd-495a-903e-83b72596bf86\") " pod="openstack/barbican-db-sync-jbcc2" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.887428 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dt5fr\" (UniqueName: \"kubernetes.io/projected/d0aff05c-75cd-495a-903e-83b72596bf86-kube-api-access-dt5fr\") pod \"barbican-db-sync-jbcc2\" (UID: \"d0aff05c-75cd-495a-903e-83b72596bf86\") " pod="openstack/barbican-db-sync-jbcc2" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.907280 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zlgh\" (UniqueName: \"kubernetes.io/projected/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-kube-api-access-2zlgh\") pod \"dnsmasq-dns-76fcf4b695-h4pr5\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.958493 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56ad69c1-bd7a-4147-9a1b-3119818cbe62-logs\") pod \"horizon-6d7dd8f89f-qsbxc\" (UID: \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\") " pod="openstack/horizon-6d7dd8f89f-qsbxc" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.958593 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/56ad69c1-bd7a-4147-9a1b-3119818cbe62-config-data\") pod \"horizon-6d7dd8f89f-qsbxc\" (UID: \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\") " 
pod="openstack/horizon-6d7dd8f89f-qsbxc" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.958641 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/56ad69c1-bd7a-4147-9a1b-3119818cbe62-scripts\") pod \"horizon-6d7dd8f89f-qsbxc\" (UID: \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\") " pod="openstack/horizon-6d7dd8f89f-qsbxc" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.958677 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/56ad69c1-bd7a-4147-9a1b-3119818cbe62-horizon-secret-key\") pod \"horizon-6d7dd8f89f-qsbxc\" (UID: \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\") " pod="openstack/horizon-6d7dd8f89f-qsbxc" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.958720 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cd27b\" (UniqueName: \"kubernetes.io/projected/56ad69c1-bd7a-4147-9a1b-3119818cbe62-kube-api-access-cd27b\") pod \"horizon-6d7dd8f89f-qsbxc\" (UID: \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\") " pod="openstack/horizon-6d7dd8f89f-qsbxc" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.961161 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/56ad69c1-bd7a-4147-9a1b-3119818cbe62-scripts\") pod \"horizon-6d7dd8f89f-qsbxc\" (UID: \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\") " pod="openstack/horizon-6d7dd8f89f-qsbxc" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.962206 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/56ad69c1-bd7a-4147-9a1b-3119818cbe62-config-data\") pod \"horizon-6d7dd8f89f-qsbxc\" (UID: \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\") " pod="openstack/horizon-6d7dd8f89f-qsbxc" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.971456 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56ad69c1-bd7a-4147-9a1b-3119818cbe62-logs\") pod \"horizon-6d7dd8f89f-qsbxc\" (UID: \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\") " pod="openstack/horizon-6d7dd8f89f-qsbxc" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.973932 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/56ad69c1-bd7a-4147-9a1b-3119818cbe62-horizon-secret-key\") pod \"horizon-6d7dd8f89f-qsbxc\" (UID: \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\") " pod="openstack/horizon-6d7dd8f89f-qsbxc" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.977856 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cd27b\" (UniqueName: \"kubernetes.io/projected/56ad69c1-bd7a-4147-9a1b-3119818cbe62-kube-api-access-cd27b\") pod \"horizon-6d7dd8f89f-qsbxc\" (UID: \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\") " pod="openstack/horizon-6d7dd8f89f-qsbxc" Dec 04 17:49:43 crc kubenswrapper[4631]: I1204 17:49:43.988323 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:49:44 crc kubenswrapper[4631]: I1204 17:49:44.010010 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6d7dd8f89f-qsbxc" Dec 04 17:49:44 crc kubenswrapper[4631]: I1204 17:49:44.030042 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-jbcc2" Dec 04 17:49:44 crc kubenswrapper[4631]: I1204 17:49:44.125632 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-sqxl8"] Dec 04 17:49:44 crc kubenswrapper[4631]: I1204 17:49:44.314428 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-xhwzb"] Dec 04 17:49:44 crc kubenswrapper[4631]: I1204 17:49:44.413732 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-tf4br"] Dec 04 17:49:44 crc kubenswrapper[4631]: W1204 17:49:44.430983 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc6763071_ba0b_4ef7_9843_9a4c66fe4a6f.slice/crio-16b7f0e5ea1df1b770a4698770f3ad4ee14fa01d9831e11036893577c954cad0 WatchSource:0}: Error finding container 16b7f0e5ea1df1b770a4698770f3ad4ee14fa01d9831e11036893577c954cad0: Status 404 returned error can't find the container with id 16b7f0e5ea1df1b770a4698770f3ad4ee14fa01d9831e11036893577c954cad0 Dec 04 17:49:44 crc kubenswrapper[4631]: I1204 17:49:44.466279 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-49bhb"] Dec 04 17:49:44 crc kubenswrapper[4631]: I1204 17:49:44.478458 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-5dhm2"] Dec 04 17:49:44 crc kubenswrapper[4631]: W1204 17:49:44.493848 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3aa97a3_43fb_4478_bd4e_e06494e42efd.slice/crio-80d354d1e8eb35d740a44a9ad41f61bd590a7ed753dc82b87b84a83bbdea7672 WatchSource:0}: Error finding container 80d354d1e8eb35d740a44a9ad41f61bd590a7ed753dc82b87b84a83bbdea7672: Status 404 returned error can't find the container with id 80d354d1e8eb35d740a44a9ad41f61bd590a7ed753dc82b87b84a83bbdea7672 Dec 04 17:49:44 crc kubenswrapper[4631]: I1204 17:49:44.566407 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" event={"ID":"046ef41f-6e70-47d1-b9df-9884366cf4da","Type":"ContainerStarted","Data":"20b8add0e1588a93dbeba545d203c2915b69059c88a2193c5be1e99378914111"} Dec 04 17:49:44 crc kubenswrapper[4631]: I1204 17:49:44.571795 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-49bhb" event={"ID":"caa9015d-d530-4caa-8a24-2338d69519a3","Type":"ContainerStarted","Data":"dcc346068c456b64385b4cdeae483a29f60bfe2bc627be94faeaa506c129c931"} Dec 04 17:49:44 crc kubenswrapper[4631]: I1204 17:49:44.583461 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-tf4br" event={"ID":"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f","Type":"ContainerStarted","Data":"16b7f0e5ea1df1b770a4698770f3ad4ee14fa01d9831e11036893577c954cad0"} Dec 04 17:49:44 crc kubenswrapper[4631]: I1204 17:49:44.591635 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-5dhm2" event={"ID":"d3aa97a3-43fb-4478-bd4e-e06494e42efd","Type":"ContainerStarted","Data":"80d354d1e8eb35d740a44a9ad41f61bd590a7ed753dc82b87b84a83bbdea7672"} Dec 04 17:49:44 crc kubenswrapper[4631]: I1204 17:49:44.597726 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xhwzb" event={"ID":"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0","Type":"ContainerStarted","Data":"01c5dfad59f4c8f9f0226b6dee97402308c3cb0c73d101dc109c31f00da4d562"} Dec 04 17:49:44 crc kubenswrapper[4631]: I1204 
17:49:44.707638 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-57cbdfdf9-fpjrr"] Dec 04 17:49:44 crc kubenswrapper[4631]: I1204 17:49:44.902153 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6d7dd8f89f-qsbxc"] Dec 04 17:49:44 crc kubenswrapper[4631]: I1204 17:49:44.914691 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:49:44 crc kubenswrapper[4631]: I1204 17:49:44.925605 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-jbcc2"] Dec 04 17:49:44 crc kubenswrapper[4631]: I1204 17:49:44.940438 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-h4pr5"] Dec 04 17:49:45 crc kubenswrapper[4631]: W1204 17:49:45.024667 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b9c86a8_914f_4bd7_a3f8_7065e92a4b70.slice/crio-77901343db8cf582b4a0331644b225687c2b20e01627395d8a030df8f35749d2 WatchSource:0}: Error finding container 77901343db8cf582b4a0331644b225687c2b20e01627395d8a030df8f35749d2: Status 404 returned error can't find the container with id 77901343db8cf582b4a0331644b225687c2b20e01627395d8a030df8f35749d2 Dec 04 17:49:45 crc kubenswrapper[4631]: W1204 17:49:45.031516 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0aff05c_75cd_495a_903e_83b72596bf86.slice/crio-1ca03bb7dfdd5e9db1464866a5d873ae88bcec7bc54dbab686759d06205e6add WatchSource:0}: Error finding container 1ca03bb7dfdd5e9db1464866a5d873ae88bcec7bc54dbab686759d06205e6add: Status 404 returned error can't find the container with id 1ca03bb7dfdd5e9db1464866a5d873ae88bcec7bc54dbab686759d06205e6add Dec 04 17:49:45 crc kubenswrapper[4631]: I1204 17:49:45.626287 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" event={"ID":"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70","Type":"ContainerStarted","Data":"93d66d7230be80b7806ab9315d4f7686c911ae191d77588b16c7e39074be779d"} Dec 04 17:49:45 crc kubenswrapper[4631]: I1204 17:49:45.626580 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" event={"ID":"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70","Type":"ContainerStarted","Data":"77901343db8cf582b4a0331644b225687c2b20e01627395d8a030df8f35749d2"} Dec 04 17:49:45 crc kubenswrapper[4631]: I1204 17:49:45.662062 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-5dhm2" event={"ID":"d3aa97a3-43fb-4478-bd4e-e06494e42efd","Type":"ContainerStarted","Data":"1e81b276fa391be2298e89d2f2828e7d7929e13f82aacfca1ccae4831bcdf449"} Dec 04 17:49:45 crc kubenswrapper[4631]: I1204 17:49:45.679221 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jbcc2" event={"ID":"d0aff05c-75cd-495a-903e-83b72596bf86","Type":"ContainerStarted","Data":"1ca03bb7dfdd5e9db1464866a5d873ae88bcec7bc54dbab686759d06205e6add"} Dec 04 17:49:45 crc kubenswrapper[4631]: I1204 17:49:45.691968 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xhwzb" event={"ID":"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0","Type":"ContainerStarted","Data":"0a25248f2c8ff50b9a0cbd5633bd21d26b2a168752d964bef12dbce9e39e5236"} Dec 04 17:49:45 crc kubenswrapper[4631]: I1204 17:49:45.743297 4631 generic.go:334] "Generic (PLEG): container finished" podID="046ef41f-6e70-47d1-b9df-9884366cf4da" 
containerID="71df31ee8308ce63ab927dcec3f90b15e1362bd4249ef201fcbdddb9887f35c8" exitCode=0 Dec 04 17:49:45 crc kubenswrapper[4631]: I1204 17:49:45.743599 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" event={"ID":"046ef41f-6e70-47d1-b9df-9884366cf4da","Type":"ContainerDied","Data":"71df31ee8308ce63ab927dcec3f90b15e1362bd4249ef201fcbdddb9887f35c8"} Dec 04 17:49:45 crc kubenswrapper[4631]: I1204 17:49:45.752342 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-5dhm2" podStartSLOduration=2.752316424 podStartE2EDuration="2.752316424s" podCreationTimestamp="2025-12-04 17:49:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:49:45.702423349 +0000 UTC m=+1315.734665347" watchObservedRunningTime="2025-12-04 17:49:45.752316424 +0000 UTC m=+1315.784558422" Dec 04 17:49:45 crc kubenswrapper[4631]: I1204 17:49:45.761937 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6d7dd8f89f-qsbxc" event={"ID":"56ad69c1-bd7a-4147-9a1b-3119818cbe62","Type":"ContainerStarted","Data":"9dea12ebd6c89ee4e57fc9d9419662f71503401df14efd0a414477dd274a00d7"} Dec 04 17:49:45 crc kubenswrapper[4631]: I1204 17:49:45.781100 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-xhwzb" podStartSLOduration=3.7810683750000003 podStartE2EDuration="3.781068375s" podCreationTimestamp="2025-12-04 17:49:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:49:45.730337556 +0000 UTC m=+1315.762579574" watchObservedRunningTime="2025-12-04 17:49:45.781068375 +0000 UTC m=+1315.813310383" Dec 04 17:49:45 crc kubenswrapper[4631]: I1204 17:49:45.825060 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc7c63c8-d4b4-49fd-81a7-1720d58d2934","Type":"ContainerStarted","Data":"46dcb6125da0d9205e08dcaf5026b594d4c50b7d5bad66e01511087ca9fea077"} Dec 04 17:49:45 crc kubenswrapper[4631]: I1204 17:49:45.826595 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-57cbdfdf9-fpjrr" event={"ID":"912d634b-beda-4d89-8dfa-ca97a1d3f54e","Type":"ContainerStarted","Data":"9e3d206a71a7493d64b92cc8b558d1c9f0ee4700d10d005b4faa5dcac613d2b6"} Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.003244 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-57cbdfdf9-fpjrr"] Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.048539 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-577ccf5fbc-b5scw"] Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.052346 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-577ccf5fbc-b5scw" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.093944 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-577ccf5fbc-b5scw"] Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.121071 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35c89cd7-210c-4475-9ffc-794e3f3456ac-logs\") pod \"horizon-577ccf5fbc-b5scw\" (UID: \"35c89cd7-210c-4475-9ffc-794e3f3456ac\") " pod="openstack/horizon-577ccf5fbc-b5scw" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.121155 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/35c89cd7-210c-4475-9ffc-794e3f3456ac-horizon-secret-key\") pod \"horizon-577ccf5fbc-b5scw\" (UID: \"35c89cd7-210c-4475-9ffc-794e3f3456ac\") " pod="openstack/horizon-577ccf5fbc-b5scw" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.121177 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/35c89cd7-210c-4475-9ffc-794e3f3456ac-config-data\") pod \"horizon-577ccf5fbc-b5scw\" (UID: \"35c89cd7-210c-4475-9ffc-794e3f3456ac\") " pod="openstack/horizon-577ccf5fbc-b5scw" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.121605 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fks5\" (UniqueName: \"kubernetes.io/projected/35c89cd7-210c-4475-9ffc-794e3f3456ac-kube-api-access-2fks5\") pod \"horizon-577ccf5fbc-b5scw\" (UID: \"35c89cd7-210c-4475-9ffc-794e3f3456ac\") " pod="openstack/horizon-577ccf5fbc-b5scw" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.121642 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/35c89cd7-210c-4475-9ffc-794e3f3456ac-scripts\") pod \"horizon-577ccf5fbc-b5scw\" (UID: \"35c89cd7-210c-4475-9ffc-794e3f3456ac\") " pod="openstack/horizon-577ccf5fbc-b5scw" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.161252 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.222776 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/35c89cd7-210c-4475-9ffc-794e3f3456ac-config-data\") pod \"horizon-577ccf5fbc-b5scw\" (UID: \"35c89cd7-210c-4475-9ffc-794e3f3456ac\") " pod="openstack/horizon-577ccf5fbc-b5scw" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.222885 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fks5\" (UniqueName: \"kubernetes.io/projected/35c89cd7-210c-4475-9ffc-794e3f3456ac-kube-api-access-2fks5\") pod \"horizon-577ccf5fbc-b5scw\" (UID: \"35c89cd7-210c-4475-9ffc-794e3f3456ac\") " pod="openstack/horizon-577ccf5fbc-b5scw" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.222917 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/35c89cd7-210c-4475-9ffc-794e3f3456ac-scripts\") pod \"horizon-577ccf5fbc-b5scw\" (UID: \"35c89cd7-210c-4475-9ffc-794e3f3456ac\") " pod="openstack/horizon-577ccf5fbc-b5scw" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.222949 
4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35c89cd7-210c-4475-9ffc-794e3f3456ac-logs\") pod \"horizon-577ccf5fbc-b5scw\" (UID: \"35c89cd7-210c-4475-9ffc-794e3f3456ac\") " pod="openstack/horizon-577ccf5fbc-b5scw" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.222976 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/35c89cd7-210c-4475-9ffc-794e3f3456ac-horizon-secret-key\") pod \"horizon-577ccf5fbc-b5scw\" (UID: \"35c89cd7-210c-4475-9ffc-794e3f3456ac\") " pod="openstack/horizon-577ccf5fbc-b5scw" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.224343 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/35c89cd7-210c-4475-9ffc-794e3f3456ac-scripts\") pod \"horizon-577ccf5fbc-b5scw\" (UID: \"35c89cd7-210c-4475-9ffc-794e3f3456ac\") " pod="openstack/horizon-577ccf5fbc-b5scw" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.224724 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/35c89cd7-210c-4475-9ffc-794e3f3456ac-config-data\") pod \"horizon-577ccf5fbc-b5scw\" (UID: \"35c89cd7-210c-4475-9ffc-794e3f3456ac\") " pod="openstack/horizon-577ccf5fbc-b5scw" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.224839 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35c89cd7-210c-4475-9ffc-794e3f3456ac-logs\") pod \"horizon-577ccf5fbc-b5scw\" (UID: \"35c89cd7-210c-4475-9ffc-794e3f3456ac\") " pod="openstack/horizon-577ccf5fbc-b5scw" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.247731 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/35c89cd7-210c-4475-9ffc-794e3f3456ac-horizon-secret-key\") pod \"horizon-577ccf5fbc-b5scw\" (UID: \"35c89cd7-210c-4475-9ffc-794e3f3456ac\") " pod="openstack/horizon-577ccf5fbc-b5scw" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.255007 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fks5\" (UniqueName: \"kubernetes.io/projected/35c89cd7-210c-4475-9ffc-794e3f3456ac-kube-api-access-2fks5\") pod \"horizon-577ccf5fbc-b5scw\" (UID: \"35c89cd7-210c-4475-9ffc-794e3f3456ac\") " pod="openstack/horizon-577ccf5fbc-b5scw" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.322262 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.423679 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-577ccf5fbc-b5scw" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.426808 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-ovsdbserver-nb\") pod \"046ef41f-6e70-47d1-b9df-9884366cf4da\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.426915 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-config\") pod \"046ef41f-6e70-47d1-b9df-9884366cf4da\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.426992 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-dns-swift-storage-0\") pod \"046ef41f-6e70-47d1-b9df-9884366cf4da\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.427040 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-ovsdbserver-sb\") pod \"046ef41f-6e70-47d1-b9df-9884366cf4da\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.427142 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vcrw\" (UniqueName: \"kubernetes.io/projected/046ef41f-6e70-47d1-b9df-9884366cf4da-kube-api-access-4vcrw\") pod \"046ef41f-6e70-47d1-b9df-9884366cf4da\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.427212 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-dns-svc\") pod \"046ef41f-6e70-47d1-b9df-9884366cf4da\" (UID: \"046ef41f-6e70-47d1-b9df-9884366cf4da\") " Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.437827 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/046ef41f-6e70-47d1-b9df-9884366cf4da-kube-api-access-4vcrw" (OuterVolumeSpecName: "kube-api-access-4vcrw") pod "046ef41f-6e70-47d1-b9df-9884366cf4da" (UID: "046ef41f-6e70-47d1-b9df-9884366cf4da"). InnerVolumeSpecName "kube-api-access-4vcrw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.473954 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "046ef41f-6e70-47d1-b9df-9884366cf4da" (UID: "046ef41f-6e70-47d1-b9df-9884366cf4da"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.486137 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "046ef41f-6e70-47d1-b9df-9884366cf4da" (UID: "046ef41f-6e70-47d1-b9df-9884366cf4da"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.501271 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "046ef41f-6e70-47d1-b9df-9884366cf4da" (UID: "046ef41f-6e70-47d1-b9df-9884366cf4da"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.506799 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "046ef41f-6e70-47d1-b9df-9884366cf4da" (UID: "046ef41f-6e70-47d1-b9df-9884366cf4da"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.516214 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-config" (OuterVolumeSpecName: "config") pod "046ef41f-6e70-47d1-b9df-9884366cf4da" (UID: "046ef41f-6e70-47d1-b9df-9884366cf4da"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.533924 4631 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.533974 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.533986 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.533999 4631 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.534010 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/046ef41f-6e70-47d1-b9df-9884366cf4da-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.534046 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vcrw\" (UniqueName: \"kubernetes.io/projected/046ef41f-6e70-47d1-b9df-9884366cf4da-kube-api-access-4vcrw\") on node \"crc\" DevicePath \"\"" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.872316 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" event={"ID":"046ef41f-6e70-47d1-b9df-9884366cf4da","Type":"ContainerDied","Data":"20b8add0e1588a93dbeba545d203c2915b69059c88a2193c5be1e99378914111"} Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.872669 4631 scope.go:117] "RemoveContainer" containerID="71df31ee8308ce63ab927dcec3f90b15e1362bd4249ef201fcbdddb9887f35c8" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.872476 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-sqxl8" Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.880115 4631 generic.go:334] "Generic (PLEG): container finished" podID="2b9c86a8-914f-4bd7-a3f8-7065e92a4b70" containerID="93d66d7230be80b7806ab9315d4f7686c911ae191d77588b16c7e39074be779d" exitCode=0 Dec 04 17:49:46 crc kubenswrapper[4631]: I1204 17:49:46.880932 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" event={"ID":"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70","Type":"ContainerDied","Data":"93d66d7230be80b7806ab9315d4f7686c911ae191d77588b16c7e39074be779d"} Dec 04 17:49:47 crc kubenswrapper[4631]: I1204 17:49:47.046208 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-577ccf5fbc-b5scw"] Dec 04 17:49:47 crc kubenswrapper[4631]: W1204 17:49:47.080972 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35c89cd7_210c_4475_9ffc_794e3f3456ac.slice/crio-545e14f888b5ad37956504c9e022c6a9fa76df1edd469df41e2e3f4a2015bc58 WatchSource:0}: Error finding container 545e14f888b5ad37956504c9e022c6a9fa76df1edd469df41e2e3f4a2015bc58: Status 404 returned error can't find the container with id 545e14f888b5ad37956504c9e022c6a9fa76df1edd469df41e2e3f4a2015bc58 Dec 04 17:49:47 crc kubenswrapper[4631]: I1204 17:49:47.119070 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-sqxl8"] Dec 04 17:49:47 crc kubenswrapper[4631]: I1204 17:49:47.143780 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-sqxl8"] Dec 04 17:49:47 crc kubenswrapper[4631]: I1204 17:49:47.896104 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-577ccf5fbc-b5scw" event={"ID":"35c89cd7-210c-4475-9ffc-794e3f3456ac","Type":"ContainerStarted","Data":"545e14f888b5ad37956504c9e022c6a9fa76df1edd469df41e2e3f4a2015bc58"} Dec 04 17:49:47 crc kubenswrapper[4631]: I1204 17:49:47.902346 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" event={"ID":"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70","Type":"ContainerStarted","Data":"7889725e9ccb0c9ca6981fcffba80a40b9dfbec70152cd541065295b0accb348"} Dec 04 17:49:48 crc kubenswrapper[4631]: I1204 17:49:48.255910 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="046ef41f-6e70-47d1-b9df-9884366cf4da" path="/var/lib/kubelet/pods/046ef41f-6e70-47d1-b9df-9884366cf4da/volumes" Dec 04 17:49:48 crc kubenswrapper[4631]: I1204 17:49:48.910771 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:49:48 crc kubenswrapper[4631]: I1204 17:49:48.932439 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" podStartSLOduration=5.932423217 podStartE2EDuration="5.932423217s" podCreationTimestamp="2025-12-04 17:49:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:49:48.931627345 +0000 UTC m=+1318.963869343" watchObservedRunningTime="2025-12-04 17:49:48.932423217 +0000 UTC m=+1318.964665215" Dec 04 17:49:49 crc kubenswrapper[4631]: I1204 17:49:49.941280 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-4hqks" 
event={"ID":"30425aae-4c9c-445c-8d10-d4e5874fda30","Type":"ContainerStarted","Data":"8093fdc24b1052668163f4b7f21ebb473ba522ed88cd98c8f3c711e54ec19ab2"} Dec 04 17:49:49 crc kubenswrapper[4631]: I1204 17:49:49.963393 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-4hqks" podStartSLOduration=2.754804128 podStartE2EDuration="36.963358228s" podCreationTimestamp="2025-12-04 17:49:13 +0000 UTC" firstStartedPulling="2025-12-04 17:49:14.50613977 +0000 UTC m=+1284.538381768" lastFinishedPulling="2025-12-04 17:49:48.71469387 +0000 UTC m=+1318.746935868" observedRunningTime="2025-12-04 17:49:49.955558585 +0000 UTC m=+1319.987800593" watchObservedRunningTime="2025-12-04 17:49:49.963358228 +0000 UTC m=+1319.995600226" Dec 04 17:49:50 crc kubenswrapper[4631]: I1204 17:49:50.949662 4631 generic.go:334] "Generic (PLEG): container finished" podID="beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0" containerID="0a25248f2c8ff50b9a0cbd5633bd21d26b2a168752d964bef12dbce9e39e5236" exitCode=0 Dec 04 17:49:50 crc kubenswrapper[4631]: I1204 17:49:50.949703 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xhwzb" event={"ID":"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0","Type":"ContainerDied","Data":"0a25248f2c8ff50b9a0cbd5633bd21d26b2a168752d964bef12dbce9e39e5236"} Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.524590 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6d7dd8f89f-qsbxc"] Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.553662 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7b99dd8d64-9nrvl"] Dec 04 17:49:52 crc kubenswrapper[4631]: E1204 17:49:52.554135 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="046ef41f-6e70-47d1-b9df-9884366cf4da" containerName="init" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.554152 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="046ef41f-6e70-47d1-b9df-9884366cf4da" containerName="init" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.554503 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="046ef41f-6e70-47d1-b9df-9884366cf4da" containerName="init" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.555658 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.558644 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.579763 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7b99dd8d64-9nrvl"] Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.649677 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-577ccf5fbc-b5scw"] Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.675348 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-77d5fd455b-8kwkp"] Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.679359 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.687941 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-77d5fd455b-8kwkp"] Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.687973 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a675d52a-03e9-46e8-8b51-4e7f378179cf-scripts\") pod \"horizon-7b99dd8d64-9nrvl\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.688005 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a675d52a-03e9-46e8-8b51-4e7f378179cf-combined-ca-bundle\") pod \"horizon-7b99dd8d64-9nrvl\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.688050 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a675d52a-03e9-46e8-8b51-4e7f378179cf-logs\") pod \"horizon-7b99dd8d64-9nrvl\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.688075 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a675d52a-03e9-46e8-8b51-4e7f378179cf-horizon-tls-certs\") pod \"horizon-7b99dd8d64-9nrvl\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.688171 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a675d52a-03e9-46e8-8b51-4e7f378179cf-config-data\") pod \"horizon-7b99dd8d64-9nrvl\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.688237 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a675d52a-03e9-46e8-8b51-4e7f378179cf-horizon-secret-key\") pod \"horizon-7b99dd8d64-9nrvl\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.688297 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h64zn\" (UniqueName: \"kubernetes.io/projected/a675d52a-03e9-46e8-8b51-4e7f378179cf-kube-api-access-h64zn\") pod \"horizon-7b99dd8d64-9nrvl\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.789860 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a675d52a-03e9-46e8-8b51-4e7f378179cf-logs\") pod \"horizon-7b99dd8d64-9nrvl\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.789930 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78aafb4d-470c-477d-bfe6-5b7a29b79fc0-combined-ca-bundle\") pod \"horizon-77d5fd455b-8kwkp\" (UID: \"78aafb4d-470c-477d-bfe6-5b7a29b79fc0\") " pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.790057 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rklpp\" (UniqueName: \"kubernetes.io/projected/78aafb4d-470c-477d-bfe6-5b7a29b79fc0-kube-api-access-rklpp\") pod \"horizon-77d5fd455b-8kwkp\" (UID: \"78aafb4d-470c-477d-bfe6-5b7a29b79fc0\") " pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.790088 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a675d52a-03e9-46e8-8b51-4e7f378179cf-horizon-tls-certs\") pod \"horizon-7b99dd8d64-9nrvl\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.790327 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a675d52a-03e9-46e8-8b51-4e7f378179cf-logs\") pod \"horizon-7b99dd8d64-9nrvl\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.791104 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a675d52a-03e9-46e8-8b51-4e7f378179cf-config-data\") pod \"horizon-7b99dd8d64-9nrvl\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.792301 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a675d52a-03e9-46e8-8b51-4e7f378179cf-config-data\") pod \"horizon-7b99dd8d64-9nrvl\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.792460 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a675d52a-03e9-46e8-8b51-4e7f378179cf-horizon-secret-key\") pod \"horizon-7b99dd8d64-9nrvl\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.792517 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/78aafb4d-470c-477d-bfe6-5b7a29b79fc0-horizon-tls-certs\") pod \"horizon-77d5fd455b-8kwkp\" (UID: \"78aafb4d-470c-477d-bfe6-5b7a29b79fc0\") " pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.792572 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78aafb4d-470c-477d-bfe6-5b7a29b79fc0-scripts\") pod \"horizon-77d5fd455b-8kwkp\" (UID: \"78aafb4d-470c-477d-bfe6-5b7a29b79fc0\") " pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.792589 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: 
\"kubernetes.io/secret/78aafb4d-470c-477d-bfe6-5b7a29b79fc0-horizon-secret-key\") pod \"horizon-77d5fd455b-8kwkp\" (UID: \"78aafb4d-470c-477d-bfe6-5b7a29b79fc0\") " pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.792603 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/78aafb4d-470c-477d-bfe6-5b7a29b79fc0-logs\") pod \"horizon-77d5fd455b-8kwkp\" (UID: \"78aafb4d-470c-477d-bfe6-5b7a29b79fc0\") " pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.792650 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h64zn\" (UniqueName: \"kubernetes.io/projected/a675d52a-03e9-46e8-8b51-4e7f378179cf-kube-api-access-h64zn\") pod \"horizon-7b99dd8d64-9nrvl\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.792727 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a675d52a-03e9-46e8-8b51-4e7f378179cf-scripts\") pod \"horizon-7b99dd8d64-9nrvl\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.792755 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a675d52a-03e9-46e8-8b51-4e7f378179cf-combined-ca-bundle\") pod \"horizon-7b99dd8d64-9nrvl\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.792784 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/78aafb4d-470c-477d-bfe6-5b7a29b79fc0-config-data\") pod \"horizon-77d5fd455b-8kwkp\" (UID: \"78aafb4d-470c-477d-bfe6-5b7a29b79fc0\") " pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.793630 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a675d52a-03e9-46e8-8b51-4e7f378179cf-scripts\") pod \"horizon-7b99dd8d64-9nrvl\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.796112 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a675d52a-03e9-46e8-8b51-4e7f378179cf-horizon-tls-certs\") pod \"horizon-7b99dd8d64-9nrvl\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.804235 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a675d52a-03e9-46e8-8b51-4e7f378179cf-combined-ca-bundle\") pod \"horizon-7b99dd8d64-9nrvl\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.817991 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a675d52a-03e9-46e8-8b51-4e7f378179cf-horizon-secret-key\") pod \"horizon-7b99dd8d64-9nrvl\" (UID: 
\"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.824166 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h64zn\" (UniqueName: \"kubernetes.io/projected/a675d52a-03e9-46e8-8b51-4e7f378179cf-kube-api-access-h64zn\") pod \"horizon-7b99dd8d64-9nrvl\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.874226 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.893798 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/78aafb4d-470c-477d-bfe6-5b7a29b79fc0-horizon-tls-certs\") pod \"horizon-77d5fd455b-8kwkp\" (UID: \"78aafb4d-470c-477d-bfe6-5b7a29b79fc0\") " pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.893842 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78aafb4d-470c-477d-bfe6-5b7a29b79fc0-scripts\") pod \"horizon-77d5fd455b-8kwkp\" (UID: \"78aafb4d-470c-477d-bfe6-5b7a29b79fc0\") " pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.893869 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/78aafb4d-470c-477d-bfe6-5b7a29b79fc0-horizon-secret-key\") pod \"horizon-77d5fd455b-8kwkp\" (UID: \"78aafb4d-470c-477d-bfe6-5b7a29b79fc0\") " pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.893887 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/78aafb4d-470c-477d-bfe6-5b7a29b79fc0-logs\") pod \"horizon-77d5fd455b-8kwkp\" (UID: \"78aafb4d-470c-477d-bfe6-5b7a29b79fc0\") " pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.893926 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/78aafb4d-470c-477d-bfe6-5b7a29b79fc0-config-data\") pod \"horizon-77d5fd455b-8kwkp\" (UID: \"78aafb4d-470c-477d-bfe6-5b7a29b79fc0\") " pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.893956 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78aafb4d-470c-477d-bfe6-5b7a29b79fc0-combined-ca-bundle\") pod \"horizon-77d5fd455b-8kwkp\" (UID: \"78aafb4d-470c-477d-bfe6-5b7a29b79fc0\") " pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.893976 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rklpp\" (UniqueName: \"kubernetes.io/projected/78aafb4d-470c-477d-bfe6-5b7a29b79fc0-kube-api-access-rklpp\") pod \"horizon-77d5fd455b-8kwkp\" (UID: \"78aafb4d-470c-477d-bfe6-5b7a29b79fc0\") " pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.895083 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/78aafb4d-470c-477d-bfe6-5b7a29b79fc0-logs\") pod 
\"horizon-77d5fd455b-8kwkp\" (UID: \"78aafb4d-470c-477d-bfe6-5b7a29b79fc0\") " pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.895332 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78aafb4d-470c-477d-bfe6-5b7a29b79fc0-scripts\") pod \"horizon-77d5fd455b-8kwkp\" (UID: \"78aafb4d-470c-477d-bfe6-5b7a29b79fc0\") " pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.896289 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/78aafb4d-470c-477d-bfe6-5b7a29b79fc0-config-data\") pod \"horizon-77d5fd455b-8kwkp\" (UID: \"78aafb4d-470c-477d-bfe6-5b7a29b79fc0\") " pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.897627 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/78aafb4d-470c-477d-bfe6-5b7a29b79fc0-horizon-tls-certs\") pod \"horizon-77d5fd455b-8kwkp\" (UID: \"78aafb4d-470c-477d-bfe6-5b7a29b79fc0\") " pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.898891 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78aafb4d-470c-477d-bfe6-5b7a29b79fc0-combined-ca-bundle\") pod \"horizon-77d5fd455b-8kwkp\" (UID: \"78aafb4d-470c-477d-bfe6-5b7a29b79fc0\") " pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.900763 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/78aafb4d-470c-477d-bfe6-5b7a29b79fc0-horizon-secret-key\") pod \"horizon-77d5fd455b-8kwkp\" (UID: \"78aafb4d-470c-477d-bfe6-5b7a29b79fc0\") " pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.911302 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rklpp\" (UniqueName: \"kubernetes.io/projected/78aafb4d-470c-477d-bfe6-5b7a29b79fc0-kube-api-access-rklpp\") pod \"horizon-77d5fd455b-8kwkp\" (UID: \"78aafb4d-470c-477d-bfe6-5b7a29b79fc0\") " pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:52 crc kubenswrapper[4631]: I1204 17:49:52.999310 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:49:53 crc kubenswrapper[4631]: I1204 17:49:53.990206 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:49:54 crc kubenswrapper[4631]: I1204 17:49:54.052090 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-dsc5v"] Dec 04 17:49:54 crc kubenswrapper[4631]: I1204 17:49:54.052390 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" podUID="d8911a06-8f51-4c42-a76e-0daa74d11bed" containerName="dnsmasq-dns" containerID="cri-o://b094f76ae4b46ac32fd0f41c23f28941e9609f035b9c67b8c61a757d175f5cae" gracePeriod=10 Dec 04 17:49:55 crc kubenswrapper[4631]: I1204 17:49:55.007674 4631 generic.go:334] "Generic (PLEG): container finished" podID="d8911a06-8f51-4c42-a76e-0daa74d11bed" containerID="b094f76ae4b46ac32fd0f41c23f28941e9609f035b9c67b8c61a757d175f5cae" exitCode=0 Dec 04 17:49:55 crc kubenswrapper[4631]: I1204 17:49:55.007949 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" event={"ID":"d8911a06-8f51-4c42-a76e-0daa74d11bed","Type":"ContainerDied","Data":"b094f76ae4b46ac32fd0f41c23f28941e9609f035b9c67b8c61a757d175f5cae"} Dec 04 17:49:57 crc kubenswrapper[4631]: I1204 17:49:57.893347 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" podUID="d8911a06-8f51-4c42-a76e-0daa74d11bed" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.122:5353: connect: connection refused" Dec 04 17:50:02 crc kubenswrapper[4631]: E1204 17:50:02.563789 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 04 17:50:02 crc kubenswrapper[4631]: E1204 17:50:02.564607 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n6bh96h68hbdhb8h65bh645h77hb5h5c9h567h65fhd4h8dh6dh699hdfh5f8h684h545h667hd8h599h5dchbdhb8h5fbh668h58bh9dh7ch79q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t2hvj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-57cbdfdf9-fpjrr_openstack(912d634b-beda-4d89-8dfa-ca97a1d3f54e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 17:50:02 crc kubenswrapper[4631]: E1204 17:50:02.568513 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-57cbdfdf9-fpjrr" podUID="912d634b-beda-4d89-8dfa-ca97a1d3f54e" Dec 04 17:50:02 crc kubenswrapper[4631]: I1204 17:50:02.892967 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" podUID="d8911a06-8f51-4c42-a76e-0daa74d11bed" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.122:5353: connect: connection refused" Dec 04 17:50:04 crc kubenswrapper[4631]: E1204 17:50:04.298948 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 04 17:50:04 crc kubenswrapper[4631]: E1204 17:50:04.299289 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5b7h5c7h75h689h54chdh64h56h5f9h5d5h65dh7bhc6h58dh5cfh574h66bh5cdh694h5c6h559h5ddhfch75h5c6h5b8h5c7h68hbh4h7dh66bq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cd27b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-6d7dd8f89f-qsbxc_openstack(56ad69c1-bd7a-4147-9a1b-3119818cbe62): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 17:50:04 crc kubenswrapper[4631]: E1204 17:50:04.301023 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-6d7dd8f89f-qsbxc" podUID="56ad69c1-bd7a-4147-9a1b-3119818cbe62" Dec 04 17:50:04 crc kubenswrapper[4631]: E1204 17:50:04.307222 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 04 17:50:04 crc kubenswrapper[4631]: E1204 17:50:04.307565 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nb4h57fh647h57fhf4h559h97h665h59dhf6h5d5h56ch588hb7h586h66dh66dh68dh86hb4hf9hffh56h577h56ch576h86h576h75h557h67bhb5q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2fks5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-577ccf5fbc-b5scw_openstack(35c89cd7-210c-4475-9ffc-794e3f3456ac): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 04 17:50:04 crc kubenswrapper[4631]: E1204 17:50:04.309262 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-577ccf5fbc-b5scw" podUID="35c89cd7-210c-4475-9ffc-794e3f3456ac"
Dec 04 17:50:06 crc kubenswrapper[4631]: I1204 17:50:06.023525 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 17:50:06 crc kubenswrapper[4631]: I1204 17:50:06.023913 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 17:50:06 crc kubenswrapper[4631]: I1204 17:50:06.129983 4631 generic.go:334] "Generic (PLEG): container finished" podID="30425aae-4c9c-445c-8d10-d4e5874fda30" containerID="8093fdc24b1052668163f4b7f21ebb473ba522ed88cd98c8f3c711e54ec19ab2" exitCode=0
Dec 04 17:50:06 crc kubenswrapper[4631]: I1204 17:50:06.130273 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-4hqks" event={"ID":"30425aae-4c9c-445c-8d10-d4e5874fda30","Type":"ContainerDied","Data":"8093fdc24b1052668163f4b7f21ebb473ba522ed88cd98c8f3c711e54ec19ab2"}
Dec 04 17:50:10 crc kubenswrapper[4631]: I1204 17:50:10.163676 4631 generic.go:334] "Generic (PLEG): container finished" podID="d3aa97a3-43fb-4478-bd4e-e06494e42efd" containerID="1e81b276fa391be2298e89d2f2828e7d7929e13f82aacfca1ccae4831bcdf449" exitCode=0
Dec 04 17:50:10 crc kubenswrapper[4631]: I1204 17:50:10.165185 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-5dhm2" event={"ID":"d3aa97a3-43fb-4478-bd4e-e06494e42efd","Type":"ContainerDied","Data":"1e81b276fa391be2298e89d2f2828e7d7929e13f82aacfca1ccae4831bcdf449"}
Dec 04 17:50:12 crc kubenswrapper[4631]: I1204 17:50:12.764003 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-xhwzb"
Dec 04 17:50:12 crc kubenswrapper[4631]: I1204 17:50:12.893557 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" podUID="d8911a06-8f51-4c42-a76e-0daa74d11bed" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.122:5353: i/o timeout"
Dec 04 17:50:12 crc kubenswrapper[4631]: I1204 17:50:12.893817 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v"
Dec 04 17:50:12 crc kubenswrapper[4631]: I1204 17:50:12.916593 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-fernet-keys\") pod \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") "
Dec 04 17:50:12 crc kubenswrapper[4631]: I1204 17:50:12.916730 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8frtm\" (UniqueName: \"kubernetes.io/projected/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-kube-api-access-8frtm\") pod \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") "
Dec 04 17:50:12 crc kubenswrapper[4631]: I1204 17:50:12.916896 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-credential-keys\") pod \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") "
Dec 04 17:50:12 crc kubenswrapper[4631]: I1204 17:50:12.916924 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-scripts\") pod \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") "
Dec 04 17:50:12 crc kubenswrapper[4631]: I1204 17:50:12.916963 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-combined-ca-bundle\") pod \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") "
Dec 04 17:50:12 crc kubenswrapper[4631]: I1204 17:50:12.916992 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-config-data\") pod \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\" (UID: \"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0\") "
Dec 04 17:50:12 crc kubenswrapper[4631]: I1204 17:50:12.930167 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0" (UID: "beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:50:12 crc kubenswrapper[4631]: I1204 17:50:12.943497 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-kube-api-access-8frtm" (OuterVolumeSpecName: "kube-api-access-8frtm") pod "beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0" (UID: "beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0"). InnerVolumeSpecName "kube-api-access-8frtm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 17:50:12 crc kubenswrapper[4631]: I1204 17:50:12.943656 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0" (UID: "beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:50:12 crc kubenswrapper[4631]: I1204 17:50:12.943810 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-scripts" (OuterVolumeSpecName: "scripts") pod "beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0" (UID: "beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:50:12 crc kubenswrapper[4631]: I1204 17:50:12.954893 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-config-data" (OuterVolumeSpecName: "config-data") pod "beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0" (UID: "beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:50:12 crc kubenswrapper[4631]: I1204 17:50:12.966142 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0" (UID: "beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:50:13 crc kubenswrapper[4631]: I1204 17:50:13.019610 4631 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-credential-keys\") on node \"crc\" DevicePath \"\""
Dec 04 17:50:13 crc kubenswrapper[4631]: I1204 17:50:13.019643 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-scripts\") on node \"crc\" DevicePath \"\""
Dec 04 17:50:13 crc kubenswrapper[4631]: I1204 17:50:13.019652 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 04 17:50:13 crc kubenswrapper[4631]: I1204 17:50:13.019661 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-config-data\") on node \"crc\" DevicePath \"\""
Dec 04 17:50:13 crc kubenswrapper[4631]: I1204 17:50:13.019668 4631 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-fernet-keys\") on node \"crc\" DevicePath \"\""
Dec 04 17:50:13 crc kubenswrapper[4631]: I1204 17:50:13.019677 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8frtm\" (UniqueName: \"kubernetes.io/projected/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0-kube-api-access-8frtm\") on node \"crc\" DevicePath \"\""
Dec 04 17:50:13 crc kubenswrapper[4631]: I1204 17:50:13.229533 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xhwzb" event={"ID":"beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0","Type":"ContainerDied","Data":"01c5dfad59f4c8f9f0226b6dee97402308c3cb0c73d101dc109c31f00da4d562"}
Dec 04 17:50:13 crc kubenswrapper[4631]: I1204 17:50:13.229568 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="01c5dfad59f4c8f9f0226b6dee97402308c3cb0c73d101dc109c31f00da4d562"
Dec 04 17:50:13 crc kubenswrapper[4631]: I1204 17:50:13.229632 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-xhwzb"
Dec 04 17:50:13 crc kubenswrapper[4631]: I1204 17:50:13.940763 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-xhwzb"]
Dec 04 17:50:13 crc kubenswrapper[4631]: I1204 17:50:13.947805 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-xhwzb"]
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.053145 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-gq8bc"]
Dec 04 17:50:14 crc kubenswrapper[4631]: E1204 17:50:14.053557 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0" containerName="keystone-bootstrap"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.053578 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0" containerName="keystone-bootstrap"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.053797 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0" containerName="keystone-bootstrap"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.054410 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-gq8bc"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.056338 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.056740 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.056941 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.057263 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-sxcqt"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.057545 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.075251 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-gq8bc"]
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.158567 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-credential-keys\") pod \"keystone-bootstrap-gq8bc\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " pod="openstack/keystone-bootstrap-gq8bc"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.158933 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-combined-ca-bundle\") pod \"keystone-bootstrap-gq8bc\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " pod="openstack/keystone-bootstrap-gq8bc"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.159077 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-fernet-keys\") pod \"keystone-bootstrap-gq8bc\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " pod="openstack/keystone-bootstrap-gq8bc"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.159224 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4l49r\" (UniqueName: \"kubernetes.io/projected/3171d16d-db61-4d69-b9c7-262da016be91-kube-api-access-4l49r\") pod \"keystone-bootstrap-gq8bc\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " pod="openstack/keystone-bootstrap-gq8bc"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.159350 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-scripts\") pod \"keystone-bootstrap-gq8bc\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " pod="openstack/keystone-bootstrap-gq8bc"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.159538 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-config-data\") pod \"keystone-bootstrap-gq8bc\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " pod="openstack/keystone-bootstrap-gq8bc"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.248780 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0" path="/var/lib/kubelet/pods/beaabe76-a7b6-4f1c-b5fb-fef60a13e9f0/volumes"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.260807 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-credential-keys\") pod \"keystone-bootstrap-gq8bc\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " pod="openstack/keystone-bootstrap-gq8bc"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.261049 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-combined-ca-bundle\") pod \"keystone-bootstrap-gq8bc\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " pod="openstack/keystone-bootstrap-gq8bc"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.261163 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-fernet-keys\") pod \"keystone-bootstrap-gq8bc\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " pod="openstack/keystone-bootstrap-gq8bc"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.261661 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4l49r\" (UniqueName: \"kubernetes.io/projected/3171d16d-db61-4d69-b9c7-262da016be91-kube-api-access-4l49r\") pod \"keystone-bootstrap-gq8bc\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " pod="openstack/keystone-bootstrap-gq8bc"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.261993 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-scripts\") pod \"keystone-bootstrap-gq8bc\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " pod="openstack/keystone-bootstrap-gq8bc"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.262130 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-config-data\") pod \"keystone-bootstrap-gq8bc\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " pod="openstack/keystone-bootstrap-gq8bc"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.265907 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-credential-keys\") pod \"keystone-bootstrap-gq8bc\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " pod="openstack/keystone-bootstrap-gq8bc"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.266015 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-fernet-keys\") pod \"keystone-bootstrap-gq8bc\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " pod="openstack/keystone-bootstrap-gq8bc"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.266730 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-scripts\") pod \"keystone-bootstrap-gq8bc\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " pod="openstack/keystone-bootstrap-gq8bc"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.267064 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-config-data\") pod \"keystone-bootstrap-gq8bc\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " pod="openstack/keystone-bootstrap-gq8bc"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.269247 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-combined-ca-bundle\") pod \"keystone-bootstrap-gq8bc\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " pod="openstack/keystone-bootstrap-gq8bc"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.281475 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4l49r\" (UniqueName: \"kubernetes.io/projected/3171d16d-db61-4d69-b9c7-262da016be91-kube-api-access-4l49r\") pod \"keystone-bootstrap-gq8bc\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " pod="openstack/keystone-bootstrap-gq8bc"
Dec 04 17:50:14 crc kubenswrapper[4631]: I1204 17:50:14.370189 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-gq8bc"
Dec 04 17:50:16 crc kubenswrapper[4631]: E1204 17:50:16.988799 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified"
Dec 04 17:50:16 crc kubenswrapper[4631]: E1204 17:50:16.989281 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dt5fr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-jbcc2_openstack(d0aff05c-75cd-495a-903e-83b72596bf86): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 04 17:50:16 crc kubenswrapper[4631]: E1204 17:50:16.991283 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-jbcc2" podUID="d0aff05c-75cd-495a-903e-83b72596bf86"
Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.085918 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-57cbdfdf9-fpjrr"
Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.211947 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/912d634b-beda-4d89-8dfa-ca97a1d3f54e-logs\") pod \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\" (UID: \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\") "
Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.212016 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/912d634b-beda-4d89-8dfa-ca97a1d3f54e-config-data\") pod \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\" (UID: \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\") "
Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.212076 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/912d634b-beda-4d89-8dfa-ca97a1d3f54e-horizon-secret-key\") pod \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\" (UID: \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\") "
Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.212097 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t2hvj\" (UniqueName: \"kubernetes.io/projected/912d634b-beda-4d89-8dfa-ca97a1d3f54e-kube-api-access-t2hvj\") pod \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\" (UID: \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\") "
Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.212180 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/912d634b-beda-4d89-8dfa-ca97a1d3f54e-scripts\") pod \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\" (UID: \"912d634b-beda-4d89-8dfa-ca97a1d3f54e\") "
Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.212657 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/912d634b-beda-4d89-8dfa-ca97a1d3f54e-logs" (OuterVolumeSpecName: "logs") pod "912d634b-beda-4d89-8dfa-ca97a1d3f54e" (UID: "912d634b-beda-4d89-8dfa-ca97a1d3f54e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.212880 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/912d634b-beda-4d89-8dfa-ca97a1d3f54e-scripts" (OuterVolumeSpecName: "scripts") pod "912d634b-beda-4d89-8dfa-ca97a1d3f54e" (UID: "912d634b-beda-4d89-8dfa-ca97a1d3f54e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.213471 4631 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/912d634b-beda-4d89-8dfa-ca97a1d3f54e-logs\") on node \"crc\" DevicePath \"\""
Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.213509 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/912d634b-beda-4d89-8dfa-ca97a1d3f54e-scripts\") on node \"crc\" DevicePath \"\""
Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.214001 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/912d634b-beda-4d89-8dfa-ca97a1d3f54e-config-data" (OuterVolumeSpecName: "config-data") pod "912d634b-beda-4d89-8dfa-ca97a1d3f54e" (UID: "912d634b-beda-4d89-8dfa-ca97a1d3f54e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.218487 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/912d634b-beda-4d89-8dfa-ca97a1d3f54e-kube-api-access-t2hvj" (OuterVolumeSpecName: "kube-api-access-t2hvj") pod "912d634b-beda-4d89-8dfa-ca97a1d3f54e" (UID: "912d634b-beda-4d89-8dfa-ca97a1d3f54e"). InnerVolumeSpecName "kube-api-access-t2hvj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.218508 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/912d634b-beda-4d89-8dfa-ca97a1d3f54e-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "912d634b-beda-4d89-8dfa-ca97a1d3f54e" (UID: "912d634b-beda-4d89-8dfa-ca97a1d3f54e"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.265917 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-57cbdfdf9-fpjrr"
Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.266514 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-57cbdfdf9-fpjrr" event={"ID":"912d634b-beda-4d89-8dfa-ca97a1d3f54e","Type":"ContainerDied","Data":"9e3d206a71a7493d64b92cc8b558d1c9f0ee4700d10d005b4faa5dcac613d2b6"}
Dec 04 17:50:17 crc kubenswrapper[4631]: E1204 17:50:17.267771 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-jbcc2" podUID="d0aff05c-75cd-495a-903e-83b72596bf86"
Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.331026 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/912d634b-beda-4d89-8dfa-ca97a1d3f54e-config-data\") on node \"crc\" DevicePath \"\""
Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.331214 4631 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/912d634b-beda-4d89-8dfa-ca97a1d3f54e-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.331227 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t2hvj\" (UniqueName: \"kubernetes.io/projected/912d634b-beda-4d89-8dfa-ca97a1d3f54e-kube-api-access-t2hvj\") on node \"crc\" DevicePath \"\""
Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.367551 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-57cbdfdf9-fpjrr"]
Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.374704 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-57cbdfdf9-fpjrr"]
Dec 04 17:50:17 crc kubenswrapper[4631]: E1204 17:50:17.458121 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified"
Dec 04 17:50:17 crc kubenswrapper[4631]: E1204 17:50:17.458283 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n558h84h8ch7bh67bh59ch6dhb6h5d9h9fh5cfh8fh548h585hf9h94h98h5bh84h557h544h58bh7h5f5h688h665hdch68fh58hc5h696h67bq,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8xbdd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(fc7c63c8-d4b4-49fd-81a7-1720d58d2934): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.565090 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6d7dd8f89f-qsbxc" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.578222 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-5dhm2" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.597358 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-4hqks" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.601229 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.608851 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-577ccf5fbc-b5scw" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.737263 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/56ad69c1-bd7a-4147-9a1b-3119818cbe62-config-data\") pod \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\" (UID: \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.737333 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-config\") pod \"d8911a06-8f51-4c42-a76e-0daa74d11bed\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.737355 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3aa97a3-43fb-4478-bd4e-e06494e42efd-combined-ca-bundle\") pod \"d3aa97a3-43fb-4478-bd4e-e06494e42efd\" (UID: \"d3aa97a3-43fb-4478-bd4e-e06494e42efd\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.737430 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-ovsdbserver-sb\") pod \"d8911a06-8f51-4c42-a76e-0daa74d11bed\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.737586 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-dns-svc\") pod \"d8911a06-8f51-4c42-a76e-0daa74d11bed\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.737667 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cd27b\" (UniqueName: \"kubernetes.io/projected/56ad69c1-bd7a-4147-9a1b-3119818cbe62-kube-api-access-cd27b\") pod \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\" (UID: \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.737693 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/35c89cd7-210c-4475-9ffc-794e3f3456ac-config-data\") pod \"35c89cd7-210c-4475-9ffc-794e3f3456ac\" (UID: \"35c89cd7-210c-4475-9ffc-794e3f3456ac\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.737720 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fks5\" (UniqueName: \"kubernetes.io/projected/35c89cd7-210c-4475-9ffc-794e3f3456ac-kube-api-access-2fks5\") pod \"35c89cd7-210c-4475-9ffc-794e3f3456ac\" (UID: \"35c89cd7-210c-4475-9ffc-794e3f3456ac\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.737752 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/56ad69c1-bd7a-4147-9a1b-3119818cbe62-scripts\") pod \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\" (UID: \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.737780 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/35c89cd7-210c-4475-9ffc-794e3f3456ac-scripts\") pod \"35c89cd7-210c-4475-9ffc-794e3f3456ac\" (UID: 
\"35c89cd7-210c-4475-9ffc-794e3f3456ac\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.737797 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-dns-swift-storage-0\") pod \"d8911a06-8f51-4c42-a76e-0daa74d11bed\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.737846 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d3aa97a3-43fb-4478-bd4e-e06494e42efd-config\") pod \"d3aa97a3-43fb-4478-bd4e-e06494e42efd\" (UID: \"d3aa97a3-43fb-4478-bd4e-e06494e42efd\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.737867 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30425aae-4c9c-445c-8d10-d4e5874fda30-config-data\") pod \"30425aae-4c9c-445c-8d10-d4e5874fda30\" (UID: \"30425aae-4c9c-445c-8d10-d4e5874fda30\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.737932 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30425aae-4c9c-445c-8d10-d4e5874fda30-combined-ca-bundle\") pod \"30425aae-4c9c-445c-8d10-d4e5874fda30\" (UID: \"30425aae-4c9c-445c-8d10-d4e5874fda30\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.737991 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/30425aae-4c9c-445c-8d10-d4e5874fda30-db-sync-config-data\") pod \"30425aae-4c9c-445c-8d10-d4e5874fda30\" (UID: \"30425aae-4c9c-445c-8d10-d4e5874fda30\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.738017 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q78vw\" (UniqueName: \"kubernetes.io/projected/d3aa97a3-43fb-4478-bd4e-e06494e42efd-kube-api-access-q78vw\") pod \"d3aa97a3-43fb-4478-bd4e-e06494e42efd\" (UID: \"d3aa97a3-43fb-4478-bd4e-e06494e42efd\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.738171 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/56ad69c1-bd7a-4147-9a1b-3119818cbe62-horizon-secret-key\") pod \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\" (UID: \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.738222 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zf9lq\" (UniqueName: \"kubernetes.io/projected/30425aae-4c9c-445c-8d10-d4e5874fda30-kube-api-access-zf9lq\") pod \"30425aae-4c9c-445c-8d10-d4e5874fda30\" (UID: \"30425aae-4c9c-445c-8d10-d4e5874fda30\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.738266 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rj5jj\" (UniqueName: \"kubernetes.io/projected/d8911a06-8f51-4c42-a76e-0daa74d11bed-kube-api-access-rj5jj\") pod \"d8911a06-8f51-4c42-a76e-0daa74d11bed\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.738293 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35c89cd7-210c-4475-9ffc-794e3f3456ac-logs\") pod \"35c89cd7-210c-4475-9ffc-794e3f3456ac\" 
(UID: \"35c89cd7-210c-4475-9ffc-794e3f3456ac\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.738310 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56ad69c1-bd7a-4147-9a1b-3119818cbe62-logs\") pod \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\" (UID: \"56ad69c1-bd7a-4147-9a1b-3119818cbe62\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.738418 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/35c89cd7-210c-4475-9ffc-794e3f3456ac-horizon-secret-key\") pod \"35c89cd7-210c-4475-9ffc-794e3f3456ac\" (UID: \"35c89cd7-210c-4475-9ffc-794e3f3456ac\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.738441 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-ovsdbserver-nb\") pod \"d8911a06-8f51-4c42-a76e-0daa74d11bed\" (UID: \"d8911a06-8f51-4c42-a76e-0daa74d11bed\") " Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.739492 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56ad69c1-bd7a-4147-9a1b-3119818cbe62-config-data" (OuterVolumeSpecName: "config-data") pod "56ad69c1-bd7a-4147-9a1b-3119818cbe62" (UID: "56ad69c1-bd7a-4147-9a1b-3119818cbe62"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.740317 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35c89cd7-210c-4475-9ffc-794e3f3456ac-scripts" (OuterVolumeSpecName: "scripts") pod "35c89cd7-210c-4475-9ffc-794e3f3456ac" (UID: "35c89cd7-210c-4475-9ffc-794e3f3456ac"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.740636 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56ad69c1-bd7a-4147-9a1b-3119818cbe62-scripts" (OuterVolumeSpecName: "scripts") pod "56ad69c1-bd7a-4147-9a1b-3119818cbe62" (UID: "56ad69c1-bd7a-4147-9a1b-3119818cbe62"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.744198 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30425aae-4c9c-445c-8d10-d4e5874fda30-kube-api-access-zf9lq" (OuterVolumeSpecName: "kube-api-access-zf9lq") pod "30425aae-4c9c-445c-8d10-d4e5874fda30" (UID: "30425aae-4c9c-445c-8d10-d4e5874fda30"). InnerVolumeSpecName "kube-api-access-zf9lq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.746338 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56ad69c1-bd7a-4147-9a1b-3119818cbe62-kube-api-access-cd27b" (OuterVolumeSpecName: "kube-api-access-cd27b") pod "56ad69c1-bd7a-4147-9a1b-3119818cbe62" (UID: "56ad69c1-bd7a-4147-9a1b-3119818cbe62"). InnerVolumeSpecName "kube-api-access-cd27b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.752996 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35c89cd7-210c-4475-9ffc-794e3f3456ac-kube-api-access-2fks5" (OuterVolumeSpecName: "kube-api-access-2fks5") pod "35c89cd7-210c-4475-9ffc-794e3f3456ac" (UID: "35c89cd7-210c-4475-9ffc-794e3f3456ac"). InnerVolumeSpecName "kube-api-access-2fks5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.753458 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35c89cd7-210c-4475-9ffc-794e3f3456ac-config-data" (OuterVolumeSpecName: "config-data") pod "35c89cd7-210c-4475-9ffc-794e3f3456ac" (UID: "35c89cd7-210c-4475-9ffc-794e3f3456ac"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.759219 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56ad69c1-bd7a-4147-9a1b-3119818cbe62-logs" (OuterVolumeSpecName: "logs") pod "56ad69c1-bd7a-4147-9a1b-3119818cbe62" (UID: "56ad69c1-bd7a-4147-9a1b-3119818cbe62"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.759460 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35c89cd7-210c-4475-9ffc-794e3f3456ac-logs" (OuterVolumeSpecName: "logs") pod "35c89cd7-210c-4475-9ffc-794e3f3456ac" (UID: "35c89cd7-210c-4475-9ffc-794e3f3456ac"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.763654 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3aa97a3-43fb-4478-bd4e-e06494e42efd-kube-api-access-q78vw" (OuterVolumeSpecName: "kube-api-access-q78vw") pod "d3aa97a3-43fb-4478-bd4e-e06494e42efd" (UID: "d3aa97a3-43fb-4478-bd4e-e06494e42efd"). InnerVolumeSpecName "kube-api-access-q78vw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.766573 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35c89cd7-210c-4475-9ffc-794e3f3456ac-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "35c89cd7-210c-4475-9ffc-794e3f3456ac" (UID: "35c89cd7-210c-4475-9ffc-794e3f3456ac"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.768616 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30425aae-4c9c-445c-8d10-d4e5874fda30-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "30425aae-4c9c-445c-8d10-d4e5874fda30" (UID: "30425aae-4c9c-445c-8d10-d4e5874fda30"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.780340 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8911a06-8f51-4c42-a76e-0daa74d11bed-kube-api-access-rj5jj" (OuterVolumeSpecName: "kube-api-access-rj5jj") pod "d8911a06-8f51-4c42-a76e-0daa74d11bed" (UID: "d8911a06-8f51-4c42-a76e-0daa74d11bed"). InnerVolumeSpecName "kube-api-access-rj5jj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.783363 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56ad69c1-bd7a-4147-9a1b-3119818cbe62-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "56ad69c1-bd7a-4147-9a1b-3119818cbe62" (UID: "56ad69c1-bd7a-4147-9a1b-3119818cbe62"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.783877 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3aa97a3-43fb-4478-bd4e-e06494e42efd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d3aa97a3-43fb-4478-bd4e-e06494e42efd" (UID: "d3aa97a3-43fb-4478-bd4e-e06494e42efd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.790572 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30425aae-4c9c-445c-8d10-d4e5874fda30-config-data" (OuterVolumeSpecName: "config-data") pod "30425aae-4c9c-445c-8d10-d4e5874fda30" (UID: "30425aae-4c9c-445c-8d10-d4e5874fda30"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.801148 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-config" (OuterVolumeSpecName: "config") pod "d8911a06-8f51-4c42-a76e-0daa74d11bed" (UID: "d8911a06-8f51-4c42-a76e-0daa74d11bed"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.805186 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3aa97a3-43fb-4478-bd4e-e06494e42efd-config" (OuterVolumeSpecName: "config") pod "d3aa97a3-43fb-4478-bd4e-e06494e42efd" (UID: "d3aa97a3-43fb-4478-bd4e-e06494e42efd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.805469 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30425aae-4c9c-445c-8d10-d4e5874fda30-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "30425aae-4c9c-445c-8d10-d4e5874fda30" (UID: "30425aae-4c9c-445c-8d10-d4e5874fda30"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.807972 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d8911a06-8f51-4c42-a76e-0daa74d11bed" (UID: "d8911a06-8f51-4c42-a76e-0daa74d11bed"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.810681 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d8911a06-8f51-4c42-a76e-0daa74d11bed" (UID: "d8911a06-8f51-4c42-a76e-0daa74d11bed"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.815605 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d8911a06-8f51-4c42-a76e-0daa74d11bed" (UID: "d8911a06-8f51-4c42-a76e-0daa74d11bed"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.817276 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d8911a06-8f51-4c42-a76e-0daa74d11bed" (UID: "d8911a06-8f51-4c42-a76e-0daa74d11bed"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.840823 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/d3aa97a3-43fb-4478-bd4e-e06494e42efd-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.840849 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30425aae-4c9c-445c-8d10-d4e5874fda30-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.840858 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30425aae-4c9c-445c-8d10-d4e5874fda30-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.840870 4631 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/30425aae-4c9c-445c-8d10-d4e5874fda30-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.840878 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q78vw\" (UniqueName: \"kubernetes.io/projected/d3aa97a3-43fb-4478-bd4e-e06494e42efd-kube-api-access-q78vw\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.840887 4631 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/56ad69c1-bd7a-4147-9a1b-3119818cbe62-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.840895 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zf9lq\" (UniqueName: \"kubernetes.io/projected/30425aae-4c9c-445c-8d10-d4e5874fda30-kube-api-access-zf9lq\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.840904 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rj5jj\" (UniqueName: \"kubernetes.io/projected/d8911a06-8f51-4c42-a76e-0daa74d11bed-kube-api-access-rj5jj\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.840913 4631 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35c89cd7-210c-4475-9ffc-794e3f3456ac-logs\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.840928 4631 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/56ad69c1-bd7a-4147-9a1b-3119818cbe62-logs\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.840936 4631 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/35c89cd7-210c-4475-9ffc-794e3f3456ac-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.840944 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.840952 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/56ad69c1-bd7a-4147-9a1b-3119818cbe62-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.840959 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.840967 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3aa97a3-43fb-4478-bd4e-e06494e42efd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.840977 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.840986 4631 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.840994 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cd27b\" (UniqueName: \"kubernetes.io/projected/56ad69c1-bd7a-4147-9a1b-3119818cbe62-kube-api-access-cd27b\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.841002 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/35c89cd7-210c-4475-9ffc-794e3f3456ac-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.841011 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fks5\" (UniqueName: \"kubernetes.io/projected/35c89cd7-210c-4475-9ffc-794e3f3456ac-kube-api-access-2fks5\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.841018 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/56ad69c1-bd7a-4147-9a1b-3119818cbe62-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.841027 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/35c89cd7-210c-4475-9ffc-794e3f3456ac-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.841034 4631 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d8911a06-8f51-4c42-a76e-0daa74d11bed-dns-swift-storage-0\") on node \"crc\" 
DevicePath \"\"" Dec 04 17:50:17 crc kubenswrapper[4631]: I1204 17:50:17.894084 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" podUID="d8911a06-8f51-4c42-a76e-0daa74d11bed" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.122:5353: i/o timeout" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.251924 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="912d634b-beda-4d89-8dfa-ca97a1d3f54e" path="/var/lib/kubelet/pods/912d634b-beda-4d89-8dfa-ca97a1d3f54e/volumes" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.310231 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-577ccf5fbc-b5scw" event={"ID":"35c89cd7-210c-4475-9ffc-794e3f3456ac","Type":"ContainerDied","Data":"545e14f888b5ad37956504c9e022c6a9fa76df1edd469df41e2e3f4a2015bc58"} Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.310260 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-577ccf5fbc-b5scw" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.312223 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6d7dd8f89f-qsbxc" event={"ID":"56ad69c1-bd7a-4147-9a1b-3119818cbe62","Type":"ContainerDied","Data":"9dea12ebd6c89ee4e57fc9d9419662f71503401df14efd0a414477dd274a00d7"} Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.312296 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6d7dd8f89f-qsbxc" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.316494 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.316489 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-dsc5v" event={"ID":"d8911a06-8f51-4c42-a76e-0daa74d11bed","Type":"ContainerDied","Data":"6632f359994806bef560616dec39cf39a860ae6d0ce8d5b2474167cd73df7be0"} Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.316611 4631 scope.go:117] "RemoveContainer" containerID="b094f76ae4b46ac32fd0f41c23f28941e9609f035b9c67b8c61a757d175f5cae" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.323179 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-5dhm2" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.323197 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-5dhm2" event={"ID":"d3aa97a3-43fb-4478-bd4e-e06494e42efd","Type":"ContainerDied","Data":"80d354d1e8eb35d740a44a9ad41f61bd590a7ed753dc82b87b84a83bbdea7672"} Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.323225 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80d354d1e8eb35d740a44a9ad41f61bd590a7ed753dc82b87b84a83bbdea7672" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.326646 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-4hqks" event={"ID":"30425aae-4c9c-445c-8d10-d4e5874fda30","Type":"ContainerDied","Data":"05f31d63faf1a4b9c71aa02e8811f056611f3be3e88356f790cab4cffa95dc16"} Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.326673 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="05f31d63faf1a4b9c71aa02e8811f056611f3be3e88356f790cab4cffa95dc16" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.326711 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-4hqks" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.386074 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6d7dd8f89f-qsbxc"] Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.394743 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6d7dd8f89f-qsbxc"] Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.411782 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-dsc5v"] Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.421546 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-dsc5v"] Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.435705 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-577ccf5fbc-b5scw"] Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.442230 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-577ccf5fbc-b5scw"] Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.775946 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-65965d6475-cbpr4"] Dec 04 17:50:18 crc kubenswrapper[4631]: E1204 17:50:18.776284 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3aa97a3-43fb-4478-bd4e-e06494e42efd" containerName="neutron-db-sync" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.776300 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3aa97a3-43fb-4478-bd4e-e06494e42efd" containerName="neutron-db-sync" Dec 04 17:50:18 crc kubenswrapper[4631]: E1204 17:50:18.776310 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8911a06-8f51-4c42-a76e-0daa74d11bed" containerName="init" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.776317 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8911a06-8f51-4c42-a76e-0daa74d11bed" containerName="init" Dec 04 17:50:18 crc kubenswrapper[4631]: E1204 17:50:18.776343 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30425aae-4c9c-445c-8d10-d4e5874fda30" containerName="glance-db-sync" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.776349 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="30425aae-4c9c-445c-8d10-d4e5874fda30" 
containerName="glance-db-sync" Dec 04 17:50:18 crc kubenswrapper[4631]: E1204 17:50:18.776360 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8911a06-8f51-4c42-a76e-0daa74d11bed" containerName="dnsmasq-dns" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.776365 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8911a06-8f51-4c42-a76e-0daa74d11bed" containerName="dnsmasq-dns" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.776520 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3aa97a3-43fb-4478-bd4e-e06494e42efd" containerName="neutron-db-sync" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.776536 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8911a06-8f51-4c42-a76e-0daa74d11bed" containerName="dnsmasq-dns" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.776548 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="30425aae-4c9c-445c-8d10-d4e5874fda30" containerName="glance-db-sync" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.777359 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65965d6475-cbpr4" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.795964 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-65965d6475-cbpr4"] Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.855950 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-ovsdbserver-nb\") pod \"dnsmasq-dns-65965d6475-cbpr4\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") " pod="openstack/dnsmasq-dns-65965d6475-cbpr4" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.855994 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-ovsdbserver-sb\") pod \"dnsmasq-dns-65965d6475-cbpr4\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") " pod="openstack/dnsmasq-dns-65965d6475-cbpr4" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.856021 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-dns-swift-storage-0\") pod \"dnsmasq-dns-65965d6475-cbpr4\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") " pod="openstack/dnsmasq-dns-65965d6475-cbpr4" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.856066 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svsds\" (UniqueName: \"kubernetes.io/projected/57d24425-a663-4dac-a200-f570f84cc1ea-kube-api-access-svsds\") pod \"dnsmasq-dns-65965d6475-cbpr4\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") " pod="openstack/dnsmasq-dns-65965d6475-cbpr4" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.856086 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-dns-svc\") pod \"dnsmasq-dns-65965d6475-cbpr4\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") " pod="openstack/dnsmasq-dns-65965d6475-cbpr4" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.856110 4631 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-config\") pod \"dnsmasq-dns-65965d6475-cbpr4\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") " pod="openstack/dnsmasq-dns-65965d6475-cbpr4" Dec 04 17:50:18 crc kubenswrapper[4631]: E1204 17:50:18.943462 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 04 17:50:18 crc kubenswrapper[4631]: E1204 17:50:18.943607 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hqkdh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-49bhb_openstack(caa9015d-d530-4caa-8a24-2338d69519a3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 17:50:18 crc kubenswrapper[4631]: E1204 17:50:18.944924 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack/cinder-db-sync-49bhb" podUID="caa9015d-d530-4caa-8a24-2338d69519a3" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.957359 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svsds\" (UniqueName: \"kubernetes.io/projected/57d24425-a663-4dac-a200-f570f84cc1ea-kube-api-access-svsds\") pod \"dnsmasq-dns-65965d6475-cbpr4\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") " pod="openstack/dnsmasq-dns-65965d6475-cbpr4" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.957417 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-dns-svc\") pod \"dnsmasq-dns-65965d6475-cbpr4\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") " pod="openstack/dnsmasq-dns-65965d6475-cbpr4" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.957443 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-config\") pod \"dnsmasq-dns-65965d6475-cbpr4\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") " pod="openstack/dnsmasq-dns-65965d6475-cbpr4" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.957523 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-ovsdbserver-nb\") pod \"dnsmasq-dns-65965d6475-cbpr4\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") " pod="openstack/dnsmasq-dns-65965d6475-cbpr4" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.957547 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-ovsdbserver-sb\") pod \"dnsmasq-dns-65965d6475-cbpr4\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") " pod="openstack/dnsmasq-dns-65965d6475-cbpr4" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.957565 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-dns-swift-storage-0\") pod \"dnsmasq-dns-65965d6475-cbpr4\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") " pod="openstack/dnsmasq-dns-65965d6475-cbpr4" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.958345 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-dns-swift-storage-0\") pod \"dnsmasq-dns-65965d6475-cbpr4\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") " pod="openstack/dnsmasq-dns-65965d6475-cbpr4" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.959189 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-dns-svc\") pod \"dnsmasq-dns-65965d6475-cbpr4\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") " pod="openstack/dnsmasq-dns-65965d6475-cbpr4" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.959737 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-config\") pod \"dnsmasq-dns-65965d6475-cbpr4\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") " pod="openstack/dnsmasq-dns-65965d6475-cbpr4" Dec 04 17:50:18 crc 
kubenswrapper[4631]: I1204 17:50:18.960232 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-ovsdbserver-nb\") pod \"dnsmasq-dns-65965d6475-cbpr4\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") " pod="openstack/dnsmasq-dns-65965d6475-cbpr4" Dec 04 17:50:18 crc kubenswrapper[4631]: I1204 17:50:18.961238 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-ovsdbserver-sb\") pod \"dnsmasq-dns-65965d6475-cbpr4\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") " pod="openstack/dnsmasq-dns-65965d6475-cbpr4" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.017496 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svsds\" (UniqueName: \"kubernetes.io/projected/57d24425-a663-4dac-a200-f570f84cc1ea-kube-api-access-svsds\") pod \"dnsmasq-dns-65965d6475-cbpr4\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") " pod="openstack/dnsmasq-dns-65965d6475-cbpr4" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.030291 4631 scope.go:117] "RemoveContainer" containerID="0719d53a5af1d545581af2f5e78711c9687418fa9c1eb4c4f8b81649db285098" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.070473 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-55d98d789b-mxmt5"] Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.073296 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-55d98d789b-mxmt5" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.085111 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-6vns7" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.085617 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.086349 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.086584 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.119122 4631 util.go:30] "No sandbox for pod can be found. 
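[editor's note] The reflector.go:368 "Caches populated" lines record client-go reflectors finishing their initial List for each Secret the new pod references; the pod's volume setup waits until those caches are synced (the real mechanism is client-go's WaitForCacheSync). A generic, self-contained sketch of that wait-for-sync gate:

package main

import (
	"fmt"
	"sync/atomic"
	"time"
)

// waitForCacheSync polls the supplied sync predicates until all report true
// or the timeout expires, mirroring the gate pods wait behind before their
// secret/configmap volumes can be materialized.
func waitForCacheSync(timeout time.Duration, synced ...func() bool) bool {
	deadline := time.Now().Add(timeout)
	for time.Now().Before(deadline) {
		ok := true
		for _, s := range synced {
			ok = ok && s()
		}
		if ok {
			return true
		}
		time.Sleep(10 * time.Millisecond)
	}
	return false
}

func main() {
	var populated atomic.Bool
	go func() { time.Sleep(50 * time.Millisecond); populated.Store(true) }()
	fmt.Println(waitForCacheSync(time.Second, populated.Load)) // true
}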
Need to start a new one" pod="openstack/dnsmasq-dns-65965d6475-cbpr4" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.135302 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-55d98d789b-mxmt5"] Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.173785 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-ovndb-tls-certs\") pod \"neutron-55d98d789b-mxmt5\" (UID: \"a2a21943-309a-4d97-9e94-5f3248de544c\") " pod="openstack/neutron-55d98d789b-mxmt5" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.173948 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-combined-ca-bundle\") pod \"neutron-55d98d789b-mxmt5\" (UID: \"a2a21943-309a-4d97-9e94-5f3248de544c\") " pod="openstack/neutron-55d98d789b-mxmt5" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.173982 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6sn7p\" (UniqueName: \"kubernetes.io/projected/a2a21943-309a-4d97-9e94-5f3248de544c-kube-api-access-6sn7p\") pod \"neutron-55d98d789b-mxmt5\" (UID: \"a2a21943-309a-4d97-9e94-5f3248de544c\") " pod="openstack/neutron-55d98d789b-mxmt5" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.174024 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-config\") pod \"neutron-55d98d789b-mxmt5\" (UID: \"a2a21943-309a-4d97-9e94-5f3248de544c\") " pod="openstack/neutron-55d98d789b-mxmt5" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.174057 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-httpd-config\") pod \"neutron-55d98d789b-mxmt5\" (UID: \"a2a21943-309a-4d97-9e94-5f3248de544c\") " pod="openstack/neutron-55d98d789b-mxmt5" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.296285 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-65965d6475-cbpr4"] Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.305306 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-combined-ca-bundle\") pod \"neutron-55d98d789b-mxmt5\" (UID: \"a2a21943-309a-4d97-9e94-5f3248de544c\") " pod="openstack/neutron-55d98d789b-mxmt5" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.305349 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6sn7p\" (UniqueName: \"kubernetes.io/projected/a2a21943-309a-4d97-9e94-5f3248de544c-kube-api-access-6sn7p\") pod \"neutron-55d98d789b-mxmt5\" (UID: \"a2a21943-309a-4d97-9e94-5f3248de544c\") " pod="openstack/neutron-55d98d789b-mxmt5" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.305413 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-config\") pod \"neutron-55d98d789b-mxmt5\" (UID: \"a2a21943-309a-4d97-9e94-5f3248de544c\") " pod="openstack/neutron-55d98d789b-mxmt5" Dec 04 17:50:19 crc 
kubenswrapper[4631]: I1204 17:50:19.305443 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-httpd-config\") pod \"neutron-55d98d789b-mxmt5\" (UID: \"a2a21943-309a-4d97-9e94-5f3248de544c\") " pod="openstack/neutron-55d98d789b-mxmt5" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.305562 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-ovndb-tls-certs\") pod \"neutron-55d98d789b-mxmt5\" (UID: \"a2a21943-309a-4d97-9e94-5f3248de544c\") " pod="openstack/neutron-55d98d789b-mxmt5" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.327069 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-m9cnv"] Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.333215 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.341622 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-m9cnv"] Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.347531 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-httpd-config\") pod \"neutron-55d98d789b-mxmt5\" (UID: \"a2a21943-309a-4d97-9e94-5f3248de544c\") " pod="openstack/neutron-55d98d789b-mxmt5" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.351989 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-config\") pod \"neutron-55d98d789b-mxmt5\" (UID: \"a2a21943-309a-4d97-9e94-5f3248de544c\") " pod="openstack/neutron-55d98d789b-mxmt5" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.353345 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-combined-ca-bundle\") pod \"neutron-55d98d789b-mxmt5\" (UID: \"a2a21943-309a-4d97-9e94-5f3248de544c\") " pod="openstack/neutron-55d98d789b-mxmt5" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.353498 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-ovndb-tls-certs\") pod \"neutron-55d98d789b-mxmt5\" (UID: \"a2a21943-309a-4d97-9e94-5f3248de544c\") " pod="openstack/neutron-55d98d789b-mxmt5" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.409960 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6sn7p\" (UniqueName: \"kubernetes.io/projected/a2a21943-309a-4d97-9e94-5f3248de544c-kube-api-access-6sn7p\") pod \"neutron-55d98d789b-mxmt5\" (UID: \"a2a21943-309a-4d97-9e94-5f3248de544c\") " pod="openstack/neutron-55d98d789b-mxmt5" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.422037 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-m9cnv\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv" Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 
Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.422173 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-m9cnv\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv"
Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.422360 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-m9cnv\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv"
Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.422416 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-m9cnv\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv"
Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.422630 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-config\") pod \"dnsmasq-dns-84b966f6c9-m9cnv\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv"
Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.422648 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vs9xb\" (UniqueName: \"kubernetes.io/projected/1a4004ec-e678-4d85-a83a-b9af1bd78865-kube-api-access-vs9xb\") pod \"dnsmasq-dns-84b966f6c9-m9cnv\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv"
Dec 04 17:50:19 crc kubenswrapper[4631]: E1204 17:50:19.444585 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-49bhb" podUID="caa9015d-d530-4caa-8a24-2338d69519a3"
Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.475314 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-55d98d789b-mxmt5"
Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.531281 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-m9cnv\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv"
Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.531325 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-m9cnv\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv"
Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.531348 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-m9cnv\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv"
Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.531469 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-config\") pod \"dnsmasq-dns-84b966f6c9-m9cnv\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv"
Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.531485 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vs9xb\" (UniqueName: \"kubernetes.io/projected/1a4004ec-e678-4d85-a83a-b9af1bd78865-kube-api-access-vs9xb\") pod \"dnsmasq-dns-84b966f6c9-m9cnv\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv"
Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.531521 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-m9cnv\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv"
Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.532837 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-config\") pod \"dnsmasq-dns-84b966f6c9-m9cnv\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv"
Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.533135 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-m9cnv\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv"
Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.533180 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-m9cnv\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv"
Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.533896 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-m9cnv\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv"
Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.534171 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-m9cnv\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv"
Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.561018 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vs9xb\" (UniqueName: \"kubernetes.io/projected/1a4004ec-e678-4d85-a83a-b9af1bd78865-kube-api-access-vs9xb\") pod \"dnsmasq-dns-84b966f6c9-m9cnv\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv"
Dec 04 17:50:19 crc kubenswrapper[4631]: I1204 17:50:19.725852 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.278335 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35c89cd7-210c-4475-9ffc-794e3f3456ac" path="/var/lib/kubelet/pods/35c89cd7-210c-4475-9ffc-794e3f3456ac/volumes"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.279095 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56ad69c1-bd7a-4147-9a1b-3119818cbe62" path="/var/lib/kubelet/pods/56ad69c1-bd7a-4147-9a1b-3119818cbe62/volumes"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.279549 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8911a06-8f51-4c42-a76e-0daa74d11bed" path="/var/lib/kubelet/pods/d8911a06-8f51-4c42-a76e-0daa74d11bed/volumes"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.298486 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.301086 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.311795 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.311950 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.312094 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-ntp7m"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.334885 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.390224 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.398444 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.403016 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.415315 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.450877 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-tf4br" event={"ID":"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f","Type":"ContainerStarted","Data":"7110f67dd1beafaa9218cabab58a590d25f39a6838e30c5fe36a3ef4426a0a50"}
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.461498 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttnsf\" (UniqueName: \"kubernetes.io/projected/9d57ad58-9c10-4424-84cd-8658526709fa-kube-api-access-ttnsf\") pod \"glance-default-internal-api-0\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " pod="openstack/glance-default-internal-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.462079 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " pod="openstack/glance-default-internal-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.462186 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d57ad58-9c10-4424-84cd-8658526709fa-logs\") pod \"glance-default-internal-api-0\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " pod="openstack/glance-default-internal-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.462281 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " pod="openstack/glance-default-external-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.462404 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d57ad58-9c10-4424-84cd-8658526709fa-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " pod="openstack/glance-default-internal-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.463329 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-config-data\") pod \"glance-default-external-api-0\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " pod="openstack/glance-default-external-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.463463 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " pod="openstack/glance-default-external-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.463578 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxr52\" (UniqueName: \"kubernetes.io/projected/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-kube-api-access-hxr52\") pod \"glance-default-external-api-0\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " pod="openstack/glance-default-external-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.463751 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-scripts\") pod \"glance-default-external-api-0\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " pod="openstack/glance-default-external-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.463846 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d57ad58-9c10-4424-84cd-8658526709fa-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " pod="openstack/glance-default-internal-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.463955 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d57ad58-9c10-4424-84cd-8658526709fa-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " pod="openstack/glance-default-internal-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.464035 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9d57ad58-9c10-4424-84cd-8658526709fa-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " pod="openstack/glance-default-internal-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.464140 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-logs\") pod \"glance-default-external-api-0\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " pod="openstack/glance-default-external-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.464271 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " pod="openstack/glance-default-external-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.471705 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-tf4br" podStartSLOduration=4.438717792 podStartE2EDuration="37.471684803s" podCreationTimestamp="2025-12-04 17:49:43 +0000 UTC" firstStartedPulling="2025-12-04 17:49:44.434322836 +0000 UTC m=+1314.466564834" lastFinishedPulling="2025-12-04 17:50:17.467289847 +0000 UTC m=+1347.499531845" observedRunningTime="2025-12-04 17:50:20.468490242 +0000 UTC m=+1350.500732250" watchObservedRunningTime="2025-12-04 17:50:20.471684803 +0000 UTC m=+1350.503926821"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.566233 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " pod="openstack/glance-default-external-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.566321 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d57ad58-9c10-4424-84cd-8658526709fa-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " pod="openstack/glance-default-internal-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.566386 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-config-data\") pod \"glance-default-external-api-0\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " pod="openstack/glance-default-external-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.566418 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " pod="openstack/glance-default-external-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.566449 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxr52\" (UniqueName: \"kubernetes.io/projected/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-kube-api-access-hxr52\") pod \"glance-default-external-api-0\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " pod="openstack/glance-default-external-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.566477 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-scripts\") pod \"glance-default-external-api-0\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " pod="openstack/glance-default-external-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.566496 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d57ad58-9c10-4424-84cd-8658526709fa-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " pod="openstack/glance-default-internal-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.566518 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d57ad58-9c10-4424-84cd-8658526709fa-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " pod="openstack/glance-default-internal-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.566535 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9d57ad58-9c10-4424-84cd-8658526709fa-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " pod="openstack/glance-default-internal-api-0"
\"kubernetes.io/empty-dir/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-logs\") pod \"glance-default-external-api-0\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.566600 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.566644 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttnsf\" (UniqueName: \"kubernetes.io/projected/9d57ad58-9c10-4424-84cd-8658526709fa-kube-api-access-ttnsf\") pod \"glance-default-internal-api-0\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.566665 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.566686 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d57ad58-9c10-4424-84cd-8658526709fa-logs\") pod \"glance-default-internal-api-0\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.567273 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d57ad58-9c10-4424-84cd-8658526709fa-logs\") pod \"glance-default-internal-api-0\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.567544 4631 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0" Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.586350 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-logs\") pod \"glance-default-external-api-0\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.590019 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.590309 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9d57ad58-9c10-4424-84cd-8658526709fa-httpd-run\") pod \"glance-default-internal-api-0\" (UID: 
\"9d57ad58-9c10-4424-84cd-8658526709fa\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.590714 4631 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.611187 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.616752 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d57ad58-9c10-4424-84cd-8658526709fa-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.627454 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-config-data\") pod \"glance-default-external-api-0\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.627963 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-scripts\") pod \"glance-default-external-api-0\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.631861 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.632508 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d57ad58-9c10-4424-84cd-8658526709fa-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.638431 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttnsf\" (UniqueName: \"kubernetes.io/projected/9d57ad58-9c10-4424-84cd-8658526709fa-kube-api-access-ttnsf\") pod \"glance-default-internal-api-0\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.642764 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxr52\" (UniqueName: \"kubernetes.io/projected/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-kube-api-access-hxr52\") pod \"glance-default-external-api-0\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:20 crc 
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.646701 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d57ad58-9c10-4424-84cd-8658526709fa-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " pod="openstack/glance-default-internal-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.652992 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.685663 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-gq8bc"]
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.722282 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " pod="openstack/glance-default-internal-api-0"
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.749348 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-65965d6475-cbpr4"]
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.778740 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7b99dd8d64-9nrvl"]
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.789475 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-77d5fd455b-8kwkp"]
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.793884 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-m9cnv"]
Dec 04 17:50:20 crc kubenswrapper[4631]: I1204 17:50:20.821091 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-55d98d789b-mxmt5"]
Dec 04 17:50:20 crc kubenswrapper[4631]: W1204 17:50:20.978878 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3171d16d_db61_4d69_b9c7_262da016be91.slice/crio-2d348f9f14aa3489f9cc26b8e9391b4cf555ed20668079e69b8f66e613e003ed WatchSource:0}: Error finding container 2d348f9f14aa3489f9cc26b8e9391b4cf555ed20668079e69b8f66e613e003ed: Status 404 returned error can't find the container with id 2d348f9f14aa3489f9cc26b8e9391b4cf555ed20668079e69b8f66e613e003ed
Dec 04 17:50:20 crc kubenswrapper[4631]: W1204 17:50:20.980886 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod57d24425_a663_4dac_a200_f570f84cc1ea.slice/crio-35a8ebbbcfabcf581595fdccd0dd72c4df23ed1ee9f51ddff2f90e7e2cd29efe WatchSource:0}: Error finding container 35a8ebbbcfabcf581595fdccd0dd72c4df23ed1ee9f51ddff2f90e7e2cd29efe: Status 404 returned error can't find the container with id 35a8ebbbcfabcf581595fdccd0dd72c4df23ed1ee9f51ddff2f90e7e2cd29efe
Dec 04 17:50:20 crc kubenswrapper[4631]: W1204 17:50:20.990500 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda2a21943_309a_4d97_9e94_5f3248de544c.slice/crio-f1e556fba8af9785eabd3962b567e29fed1266d6e4298152e88731e6ec882f75 WatchSource:0}: Error finding container f1e556fba8af9785eabd3962b567e29fed1266d6e4298152e88731e6ec882f75: Status 404 returned error can't find the container with id f1e556fba8af9785eabd3962b567e29fed1266d6e4298152e88731e6ec882f75
Dec 04 17:50:20 crc kubenswrapper[4631]: W1204 17:50:20.995115 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda675d52a_03e9_46e8_8b51_4e7f378179cf.slice/crio-0186dd6fb04f8834957eea59aaf33053c831eb9b19bf4e58a8658dfb47c240d4 WatchSource:0}: Error finding container 0186dd6fb04f8834957eea59aaf33053c831eb9b19bf4e58a8658dfb47c240d4: Status 404 returned error can't find the container with id 0186dd6fb04f8834957eea59aaf33053c831eb9b19bf4e58a8658dfb47c240d4
Dec 04 17:50:20 crc kubenswrapper[4631]: W1204 17:50:20.998969 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a4004ec_e678_4d85_a83a_b9af1bd78865.slice/crio-8211e37e432266ff3cd58a8cade8e3d05102733d4384577ec1e408fc0ad17865 WatchSource:0}: Error finding container 8211e37e432266ff3cd58a8cade8e3d05102733d4384577ec1e408fc0ad17865: Status 404 returned error can't find the container with id 8211e37e432266ff3cd58a8cade8e3d05102733d4384577ec1e408fc0ad17865
Dec 04 17:50:21 crc kubenswrapper[4631]: I1204 17:50:21.021761 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 04 17:50:21 crc kubenswrapper[4631]: I1204 17:50:21.464035 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gq8bc" event={"ID":"3171d16d-db61-4d69-b9c7-262da016be91","Type":"ContainerStarted","Data":"2d348f9f14aa3489f9cc26b8e9391b4cf555ed20668079e69b8f66e613e003ed"}
Dec 04 17:50:21 crc kubenswrapper[4631]: I1204 17:50:21.483100 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv" event={"ID":"1a4004ec-e678-4d85-a83a-b9af1bd78865","Type":"ContainerStarted","Data":"8211e37e432266ff3cd58a8cade8e3d05102733d4384577ec1e408fc0ad17865"}
Dec 04 17:50:21 crc kubenswrapper[4631]: I1204 17:50:21.491498 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-77d5fd455b-8kwkp" event={"ID":"78aafb4d-470c-477d-bfe6-5b7a29b79fc0","Type":"ContainerStarted","Data":"1f300ee7fdaae700e3109c165657a4a22cc07b3b7e8c2878081cf1492dbe4d2b"}
Dec 04 17:50:21 crc kubenswrapper[4631]: I1204 17:50:21.502264 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b99dd8d64-9nrvl" event={"ID":"a675d52a-03e9-46e8-8b51-4e7f378179cf","Type":"ContainerStarted","Data":"0186dd6fb04f8834957eea59aaf33053c831eb9b19bf4e58a8658dfb47c240d4"}
Dec 04 17:50:21 crc kubenswrapper[4631]: I1204 17:50:21.517037 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65965d6475-cbpr4" event={"ID":"57d24425-a663-4dac-a200-f570f84cc1ea","Type":"ContainerStarted","Data":"35a8ebbbcfabcf581595fdccd0dd72c4df23ed1ee9f51ddff2f90e7e2cd29efe"}
Dec 04 17:50:21 crc kubenswrapper[4631]: I1204 17:50:21.533987 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55d98d789b-mxmt5" event={"ID":"a2a21943-309a-4d97-9e94-5f3248de544c","Type":"ContainerStarted","Data":"f1e556fba8af9785eabd3962b567e29fed1266d6e4298152e88731e6ec882f75"}
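[editor's note] The manager.go:1169 warnings above are a benign race: cadvisor sees a cgroup-creation watch event, but by the time it asks CRI-O about the container the lookup returns 404 because the container is already gone (or not yet registered). The usual handling is to treat not-found as skippable rather than fatal, as in this sketch, which uses a filesystem stand-in for the runtime lookup:

package main

import (
	"errors"
	"fmt"
	"io/fs"
	"os"
)

// inspectCgroup handles a watch event for a cgroup path, tolerating the
// delete race: a not-found result is logged and skipped, not escalated.
func inspectCgroup(path string) error {
	if _, err := os.Stat(path); errors.Is(err, fs.ErrNotExist) {
		fmt.Printf("skipping watch event for %s: container already gone\n", path)
		return nil // not-found during a create/delete race is expected
	} else if err != nil {
		return err
	}
	return nil
}

func main() {
	_ = inspectCgroup("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/gone.slice")
}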
Dec 04 17:50:22 crc kubenswrapper[4631]: I1204 17:50:22.042697 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 04 17:50:22 crc kubenswrapper[4631]: I1204 17:50:22.147840 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 04 17:50:22 crc kubenswrapper[4631]: I1204 17:50:22.563114 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gq8bc" event={"ID":"3171d16d-db61-4d69-b9c7-262da016be91","Type":"ContainerStarted","Data":"e208b22924cd5eb32f1e398d016449503ea542b4d58964a3f358045a30c66cc1"}
Dec 04 17:50:22 crc kubenswrapper[4631]: I1204 17:50:22.565588 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9d57ad58-9c10-4424-84cd-8658526709fa","Type":"ContainerStarted","Data":"feee2643918cd8081b0b083cc429c0cc1d3506a6d82432c723f6835e7849fff0"}
Dec 04 17:50:22 crc kubenswrapper[4631]: I1204 17:50:22.573723 4631 generic.go:334] "Generic (PLEG): container finished" podID="1a4004ec-e678-4d85-a83a-b9af1bd78865" containerID="c1d51b59f3c0306470f800b58ca7c3e45cdbf32e011d9872e400a0c20ee84974" exitCode=0
Dec 04 17:50:22 crc kubenswrapper[4631]: I1204 17:50:22.573787 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv" event={"ID":"1a4004ec-e678-4d85-a83a-b9af1bd78865","Type":"ContainerDied","Data":"c1d51b59f3c0306470f800b58ca7c3e45cdbf32e011d9872e400a0c20ee84974"}
Dec 04 17:50:22 crc kubenswrapper[4631]: I1204 17:50:22.590575 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-gq8bc" podStartSLOduration=8.590558711 podStartE2EDuration="8.590558711s" podCreationTimestamp="2025-12-04 17:50:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:50:22.584110617 +0000 UTC m=+1352.616352615" watchObservedRunningTime="2025-12-04 17:50:22.590558711 +0000 UTC m=+1352.622800709"
Dec 04 17:50:22 crc kubenswrapper[4631]: I1204 17:50:22.606533 4631 generic.go:334] "Generic (PLEG): container finished" podID="57d24425-a663-4dac-a200-f570f84cc1ea" containerID="e4970f304338a39f204e8b83ac846f811f7e236249572206ef5b6f6e50cd16f9" exitCode=0
Dec 04 17:50:22 crc kubenswrapper[4631]: I1204 17:50:22.606599 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65965d6475-cbpr4" event={"ID":"57d24425-a663-4dac-a200-f570f84cc1ea","Type":"ContainerDied","Data":"e4970f304338a39f204e8b83ac846f811f7e236249572206ef5b6f6e50cd16f9"}
Dec 04 17:50:22 crc kubenswrapper[4631]: I1204 17:50:22.619045 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55d98d789b-mxmt5" event={"ID":"a2a21943-309a-4d97-9e94-5f3248de544c","Type":"ContainerStarted","Data":"05c8f34ef3cd9028ef6940053c7f05e99242048b15d5dea70493d4f3817ca6e3"}
Dec 04 17:50:22 crc kubenswrapper[4631]: I1204 17:50:22.629429 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77","Type":"ContainerStarted","Data":"7d2044854dcee8d3005049a1e820b06d93967cacd2cd2023e52f6f35aa31b74d"}
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.054627 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.228887 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.235147 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65965d6475-cbpr4"
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.358469 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-config\") pod \"57d24425-a663-4dac-a200-f570f84cc1ea\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") "
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.358529 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-dns-swift-storage-0\") pod \"57d24425-a663-4dac-a200-f570f84cc1ea\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") "
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.358620 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svsds\" (UniqueName: \"kubernetes.io/projected/57d24425-a663-4dac-a200-f570f84cc1ea-kube-api-access-svsds\") pod \"57d24425-a663-4dac-a200-f570f84cc1ea\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") "
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.358676 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-ovsdbserver-nb\") pod \"57d24425-a663-4dac-a200-f570f84cc1ea\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") "
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.358738 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-ovsdbserver-sb\") pod \"57d24425-a663-4dac-a200-f570f84cc1ea\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") "
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.358782 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-dns-svc\") pod \"57d24425-a663-4dac-a200-f570f84cc1ea\" (UID: \"57d24425-a663-4dac-a200-f570f84cc1ea\") "
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.413315 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57d24425-a663-4dac-a200-f570f84cc1ea-kube-api-access-svsds" (OuterVolumeSpecName: "kube-api-access-svsds") pod "57d24425-a663-4dac-a200-f570f84cc1ea" (UID: "57d24425-a663-4dac-a200-f570f84cc1ea"). InnerVolumeSpecName "kube-api-access-svsds". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.462892 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svsds\" (UniqueName: \"kubernetes.io/projected/57d24425-a663-4dac-a200-f570f84cc1ea-kube-api-access-svsds\") on node \"crc\" DevicePath \"\""
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.536006 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "57d24425-a663-4dac-a200-f570f84cc1ea" (UID: "57d24425-a663-4dac-a200-f570f84cc1ea"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.559179 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "57d24425-a663-4dac-a200-f570f84cc1ea" (UID: "57d24425-a663-4dac-a200-f570f84cc1ea"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.564219 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.564251 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.566794 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-config" (OuterVolumeSpecName: "config") pod "57d24425-a663-4dac-a200-f570f84cc1ea" (UID: "57d24425-a663-4dac-a200-f570f84cc1ea"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.573546 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "57d24425-a663-4dac-a200-f570f84cc1ea" (UID: "57d24425-a663-4dac-a200-f570f84cc1ea"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.577365 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "57d24425-a663-4dac-a200-f570f84cc1ea" (UID: "57d24425-a663-4dac-a200-f570f84cc1ea"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.638155 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b99dd8d64-9nrvl" event={"ID":"a675d52a-03e9-46e8-8b51-4e7f378179cf","Type":"ContainerStarted","Data":"dd83d7f3d8572b684138f5f3d8a13e79de4098e2db853d6a15e483ce33e834ab"} Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.642241 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65965d6475-cbpr4" event={"ID":"57d24425-a663-4dac-a200-f570f84cc1ea","Type":"ContainerDied","Data":"35a8ebbbcfabcf581595fdccd0dd72c4df23ed1ee9f51ddff2f90e7e2cd29efe"} Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.642287 4631 scope.go:117] "RemoveContainer" containerID="e4970f304338a39f204e8b83ac846f811f7e236249572206ef5b6f6e50cd16f9" Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.642253 4631 util.go:48] "No ready sandbox for pod can be found. 
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.647108 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55d98d789b-mxmt5" event={"ID":"a2a21943-309a-4d97-9e94-5f3248de544c","Type":"ContainerStarted","Data":"c7234416a30da8efe9fd058bca01c7f0b459a50f43ae28b10faa94fc2225254a"}
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.648710 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-55d98d789b-mxmt5"
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.651241 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv" event={"ID":"1a4004ec-e678-4d85-a83a-b9af1bd78865","Type":"ContainerStarted","Data":"6a8cd481f651c8d73b1b19c26aadca1133fdfc3e0cdc4cc28604f2e00795a23b"}
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.652361 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv"
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.654215 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc7c63c8-d4b4-49fd-81a7-1720d58d2934","Type":"ContainerStarted","Data":"4109a6077f4a790a013d97cbb7037357e84f36997e835618b3d47f18252e6c46"}
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.658099 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-77d5fd455b-8kwkp" event={"ID":"78aafb4d-470c-477d-bfe6-5b7a29b79fc0","Type":"ContainerStarted","Data":"0f78926ce90b6c4a118350734f0fc4d5357acd257aae02847bdd3e1675a2e79f"}
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.665669 4631 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.666617 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-config\") on node \"crc\" DevicePath \"\""
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.666677 4631 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/57d24425-a663-4dac-a200-f570f84cc1ea-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.695666 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv" podStartSLOduration=4.695650779 podStartE2EDuration="4.695650779s" podCreationTimestamp="2025-12-04 17:50:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:50:23.694153217 +0000 UTC m=+1353.726395215" watchObservedRunningTime="2025-12-04 17:50:23.695650779 +0000 UTC m=+1353.727892777"
Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.698601 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-55d98d789b-mxmt5" podStartSLOduration=4.698594173 podStartE2EDuration="4.698594173s" podCreationTimestamp="2025-12-04 17:50:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:50:23.677747258 +0000 UTC m=+1353.709989256" watchObservedRunningTime="2025-12-04 17:50:23.698594173 +0000 UTC m=+1353.730836171"
UTC m=+1353.730836171" Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.742859 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-65965d6475-cbpr4"] Dec 04 17:50:23 crc kubenswrapper[4631]: I1204 17:50:23.750980 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-65965d6475-cbpr4"] Dec 04 17:50:24 crc kubenswrapper[4631]: I1204 17:50:24.270468 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57d24425-a663-4dac-a200-f570f84cc1ea" path="/var/lib/kubelet/pods/57d24425-a663-4dac-a200-f570f84cc1ea/volumes" Dec 04 17:50:24 crc kubenswrapper[4631]: I1204 17:50:24.680423 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9d57ad58-9c10-4424-84cd-8658526709fa","Type":"ContainerStarted","Data":"0e63eeb1e79dc0261a52606dac45d75fa27b9210be5bd7b3be43789885ba9956"} Dec 04 17:50:24 crc kubenswrapper[4631]: I1204 17:50:24.685483 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-77d5fd455b-8kwkp" event={"ID":"78aafb4d-470c-477d-bfe6-5b7a29b79fc0","Type":"ContainerStarted","Data":"b06b4495ac4d64fb90ce71822defbadb0ac8243d3559e3457c5c1f6e4549434b"} Dec 04 17:50:24 crc kubenswrapper[4631]: I1204 17:50:24.689284 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b99dd8d64-9nrvl" event={"ID":"a675d52a-03e9-46e8-8b51-4e7f378179cf","Type":"ContainerStarted","Data":"c0232110e98619eba87d73ad4f17b16ad869ae5f8c81c4f0cb9721c3b739dcde"} Dec 04 17:50:24 crc kubenswrapper[4631]: I1204 17:50:24.696244 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77","Type":"ContainerStarted","Data":"47168fe9b26aa94590ee97f585689e0ba044c08f8bf75e3882a5734ec3b3e732"} Dec 04 17:50:24 crc kubenswrapper[4631]: I1204 17:50:24.727815 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-77d5fd455b-8kwkp" podStartSLOduration=31.783522927 podStartE2EDuration="32.727797963s" podCreationTimestamp="2025-12-04 17:49:52 +0000 UTC" firstStartedPulling="2025-12-04 17:50:20.970883388 +0000 UTC m=+1351.003125376" lastFinishedPulling="2025-12-04 17:50:21.915158394 +0000 UTC m=+1351.947400412" observedRunningTime="2025-12-04 17:50:24.723737427 +0000 UTC m=+1354.755979425" watchObservedRunningTime="2025-12-04 17:50:24.727797963 +0000 UTC m=+1354.760039961" Dec 04 17:50:24 crc kubenswrapper[4631]: I1204 17:50:24.750257 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7b99dd8d64-9nrvl" podStartSLOduration=31.740794627 podStartE2EDuration="32.750243504s" podCreationTimestamp="2025-12-04 17:49:52 +0000 UTC" firstStartedPulling="2025-12-04 17:50:21.048549706 +0000 UTC m=+1351.080791704" lastFinishedPulling="2025-12-04 17:50:22.057998583 +0000 UTC m=+1352.090240581" observedRunningTime="2025-12-04 17:50:24.749016799 +0000 UTC m=+1354.781258797" watchObservedRunningTime="2025-12-04 17:50:24.750243504 +0000 UTC m=+1354.782485502" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.169483 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6d6c6b7549-c7hqg"] Dec 04 17:50:25 crc kubenswrapper[4631]: E1204 17:50:25.170076 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57d24425-a663-4dac-a200-f570f84cc1ea" containerName="init" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.170088 4631 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="57d24425-a663-4dac-a200-f570f84cc1ea" containerName="init" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.170298 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="57d24425-a663-4dac-a200-f570f84cc1ea" containerName="init" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.171097 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.174397 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.182982 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.195815 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsx5p\" (UniqueName: \"kubernetes.io/projected/7dc0a764-9aea-494f-b71e-eb0df5cf3d66-kube-api-access-lsx5p\") pod \"neutron-6d6c6b7549-c7hqg\" (UID: \"7dc0a764-9aea-494f-b71e-eb0df5cf3d66\") " pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.195859 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dc0a764-9aea-494f-b71e-eb0df5cf3d66-combined-ca-bundle\") pod \"neutron-6d6c6b7549-c7hqg\" (UID: \"7dc0a764-9aea-494f-b71e-eb0df5cf3d66\") " pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.195879 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7dc0a764-9aea-494f-b71e-eb0df5cf3d66-httpd-config\") pod \"neutron-6d6c6b7549-c7hqg\" (UID: \"7dc0a764-9aea-494f-b71e-eb0df5cf3d66\") " pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.196042 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dc0a764-9aea-494f-b71e-eb0df5cf3d66-internal-tls-certs\") pod \"neutron-6d6c6b7549-c7hqg\" (UID: \"7dc0a764-9aea-494f-b71e-eb0df5cf3d66\") " pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.196125 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7dc0a764-9aea-494f-b71e-eb0df5cf3d66-config\") pod \"neutron-6d6c6b7549-c7hqg\" (UID: \"7dc0a764-9aea-494f-b71e-eb0df5cf3d66\") " pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.196194 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dc0a764-9aea-494f-b71e-eb0df5cf3d66-ovndb-tls-certs\") pod \"neutron-6d6c6b7549-c7hqg\" (UID: \"7dc0a764-9aea-494f-b71e-eb0df5cf3d66\") " pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.196301 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dc0a764-9aea-494f-b71e-eb0df5cf3d66-public-tls-certs\") pod \"neutron-6d6c6b7549-c7hqg\" (UID: \"7dc0a764-9aea-494f-b71e-eb0df5cf3d66\") " 
pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.222932 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6d6c6b7549-c7hqg"] Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.308203 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsx5p\" (UniqueName: \"kubernetes.io/projected/7dc0a764-9aea-494f-b71e-eb0df5cf3d66-kube-api-access-lsx5p\") pod \"neutron-6d6c6b7549-c7hqg\" (UID: \"7dc0a764-9aea-494f-b71e-eb0df5cf3d66\") " pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.308315 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dc0a764-9aea-494f-b71e-eb0df5cf3d66-combined-ca-bundle\") pod \"neutron-6d6c6b7549-c7hqg\" (UID: \"7dc0a764-9aea-494f-b71e-eb0df5cf3d66\") " pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.308336 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7dc0a764-9aea-494f-b71e-eb0df5cf3d66-httpd-config\") pod \"neutron-6d6c6b7549-c7hqg\" (UID: \"7dc0a764-9aea-494f-b71e-eb0df5cf3d66\") " pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.308513 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dc0a764-9aea-494f-b71e-eb0df5cf3d66-internal-tls-certs\") pod \"neutron-6d6c6b7549-c7hqg\" (UID: \"7dc0a764-9aea-494f-b71e-eb0df5cf3d66\") " pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.317840 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7dc0a764-9aea-494f-b71e-eb0df5cf3d66-config\") pod \"neutron-6d6c6b7549-c7hqg\" (UID: \"7dc0a764-9aea-494f-b71e-eb0df5cf3d66\") " pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.318304 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dc0a764-9aea-494f-b71e-eb0df5cf3d66-ovndb-tls-certs\") pod \"neutron-6d6c6b7549-c7hqg\" (UID: \"7dc0a764-9aea-494f-b71e-eb0df5cf3d66\") " pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.318493 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dc0a764-9aea-494f-b71e-eb0df5cf3d66-public-tls-certs\") pod \"neutron-6d6c6b7549-c7hqg\" (UID: \"7dc0a764-9aea-494f-b71e-eb0df5cf3d66\") " pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.323181 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dc0a764-9aea-494f-b71e-eb0df5cf3d66-ovndb-tls-certs\") pod \"neutron-6d6c6b7549-c7hqg\" (UID: \"7dc0a764-9aea-494f-b71e-eb0df5cf3d66\") " pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.324009 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dc0a764-9aea-494f-b71e-eb0df5cf3d66-internal-tls-certs\") pod \"neutron-6d6c6b7549-c7hqg\" (UID: 
\"7dc0a764-9aea-494f-b71e-eb0df5cf3d66\") " pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.329802 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/7dc0a764-9aea-494f-b71e-eb0df5cf3d66-config\") pod \"neutron-6d6c6b7549-c7hqg\" (UID: \"7dc0a764-9aea-494f-b71e-eb0df5cf3d66\") " pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.331999 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7dc0a764-9aea-494f-b71e-eb0df5cf3d66-httpd-config\") pod \"neutron-6d6c6b7549-c7hqg\" (UID: \"7dc0a764-9aea-494f-b71e-eb0df5cf3d66\") " pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.332401 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dc0a764-9aea-494f-b71e-eb0df5cf3d66-public-tls-certs\") pod \"neutron-6d6c6b7549-c7hqg\" (UID: \"7dc0a764-9aea-494f-b71e-eb0df5cf3d66\") " pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.334179 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dc0a764-9aea-494f-b71e-eb0df5cf3d66-combined-ca-bundle\") pod \"neutron-6d6c6b7549-c7hqg\" (UID: \"7dc0a764-9aea-494f-b71e-eb0df5cf3d66\") " pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.352789 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsx5p\" (UniqueName: \"kubernetes.io/projected/7dc0a764-9aea-494f-b71e-eb0df5cf3d66-kube-api-access-lsx5p\") pod \"neutron-6d6c6b7549-c7hqg\" (UID: \"7dc0a764-9aea-494f-b71e-eb0df5cf3d66\") " pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.493019 4631 util.go:30] "No sandbox for pod can be found. 
Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.729343 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77","Type":"ContainerStarted","Data":"6a68f80c904f9db30afbf5a5d355499b75c15628724dce9f1dee881e8df3185f"}
Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.729854 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="a2fdc5b3-e844-49c5-b1ff-88ad16e57a77" containerName="glance-log" containerID="cri-o://47168fe9b26aa94590ee97f585689e0ba044c08f8bf75e3882a5734ec3b3e732" gracePeriod=30
Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.730550 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="a2fdc5b3-e844-49c5-b1ff-88ad16e57a77" containerName="glance-httpd" containerID="cri-o://6a68f80c904f9db30afbf5a5d355499b75c15628724dce9f1dee881e8df3185f" gracePeriod=30
Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.746655 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9d57ad58-9c10-4424-84cd-8658526709fa","Type":"ContainerStarted","Data":"663d6e838a59d8906e9c42aec41fd708ec3adee9b4be74213d78bc8b5d58d551"}
Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.747359 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="9d57ad58-9c10-4424-84cd-8658526709fa" containerName="glance-log" containerID="cri-o://0e63eeb1e79dc0261a52606dac45d75fa27b9210be5bd7b3be43789885ba9956" gracePeriod=30
Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.747594 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="9d57ad58-9c10-4424-84cd-8658526709fa" containerName="glance-httpd" containerID="cri-o://663d6e838a59d8906e9c42aec41fd708ec3adee9b4be74213d78bc8b5d58d551" gracePeriod=30
Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.758422 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=6.758400874 podStartE2EDuration="6.758400874s" podCreationTimestamp="2025-12-04 17:50:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:50:25.754267386 +0000 UTC m=+1355.786509394" watchObservedRunningTime="2025-12-04 17:50:25.758400874 +0000 UTC m=+1355.790642872"
Dec 04 17:50:25 crc kubenswrapper[4631]: I1204 17:50:25.805096 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.805076037 podStartE2EDuration="6.805076037s" podCreationTimestamp="2025-12-04 17:50:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:50:25.780166056 +0000 UTC m=+1355.812408064" watchObservedRunningTime="2025-12-04 17:50:25.805076037 +0000 UTC m=+1355.837318035"
Dec 04 17:50:26 crc kubenswrapper[4631]: I1204 17:50:26.160643 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6d6c6b7549-c7hqg"]
Dec 04 17:50:26 crc kubenswrapper[4631]: W1204 17:50:26.174461 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7dc0a764_9aea_494f_b71e_eb0df5cf3d66.slice/crio-6c5284870e2efc1415c5e333de65c0056fce1e46496f09832bdf18810b17493c WatchSource:0}: Error finding container 6c5284870e2efc1415c5e333de65c0056fce1e46496f09832bdf18810b17493c: Status 404 returned error can't find the container with id 6c5284870e2efc1415c5e333de65c0056fce1e46496f09832bdf18810b17493c
Dec 04 17:50:26 crc kubenswrapper[4631]: I1204 17:50:26.770332 4631 generic.go:334] "Generic (PLEG): container finished" podID="c6763071-ba0b-4ef7-9843-9a4c66fe4a6f" containerID="7110f67dd1beafaa9218cabab58a590d25f39a6838e30c5fe36a3ef4426a0a50" exitCode=0
Dec 04 17:50:26 crc kubenswrapper[4631]: I1204 17:50:26.770829 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-tf4br" event={"ID":"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f","Type":"ContainerDied","Data":"7110f67dd1beafaa9218cabab58a590d25f39a6838e30c5fe36a3ef4426a0a50"}
Dec 04 17:50:26 crc kubenswrapper[4631]: I1204 17:50:26.779622 4631 generic.go:334] "Generic (PLEG): container finished" podID="a2fdc5b3-e844-49c5-b1ff-88ad16e57a77" containerID="6a68f80c904f9db30afbf5a5d355499b75c15628724dce9f1dee881e8df3185f" exitCode=0
Dec 04 17:50:26 crc kubenswrapper[4631]: I1204 17:50:26.779647 4631 generic.go:334] "Generic (PLEG): container finished" podID="a2fdc5b3-e844-49c5-b1ff-88ad16e57a77" containerID="47168fe9b26aa94590ee97f585689e0ba044c08f8bf75e3882a5734ec3b3e732" exitCode=143
Dec 04 17:50:26 crc kubenswrapper[4631]: I1204 17:50:26.779688 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77","Type":"ContainerDied","Data":"6a68f80c904f9db30afbf5a5d355499b75c15628724dce9f1dee881e8df3185f"}
Dec 04 17:50:26 crc kubenswrapper[4631]: I1204 17:50:26.779714 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77","Type":"ContainerDied","Data":"47168fe9b26aa94590ee97f585689e0ba044c08f8bf75e3882a5734ec3b3e732"}
Dec 04 17:50:26 crc kubenswrapper[4631]: I1204 17:50:26.797250 4631 generic.go:334] "Generic (PLEG): container finished" podID="9d57ad58-9c10-4424-84cd-8658526709fa" containerID="663d6e838a59d8906e9c42aec41fd708ec3adee9b4be74213d78bc8b5d58d551" exitCode=0
Dec 04 17:50:26 crc kubenswrapper[4631]: I1204 17:50:26.797487 4631 generic.go:334] "Generic (PLEG): container finished" podID="9d57ad58-9c10-4424-84cd-8658526709fa" containerID="0e63eeb1e79dc0261a52606dac45d75fa27b9210be5bd7b3be43789885ba9956" exitCode=143
Dec 04 17:50:26 crc kubenswrapper[4631]: I1204 17:50:26.797591 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9d57ad58-9c10-4424-84cd-8658526709fa","Type":"ContainerDied","Data":"663d6e838a59d8906e9c42aec41fd708ec3adee9b4be74213d78bc8b5d58d551"}
Dec 04 17:50:26 crc kubenswrapper[4631]: I1204 17:50:26.797699 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9d57ad58-9c10-4424-84cd-8658526709fa","Type":"ContainerDied","Data":"0e63eeb1e79dc0261a52606dac45d75fa27b9210be5bd7b3be43789885ba9956"}
Dec 04 17:50:26 crc kubenswrapper[4631]: I1204 17:50:26.800950 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d6c6b7549-c7hqg" event={"ID":"7dc0a764-9aea-494f-b71e-eb0df5cf3d66","Type":"ContainerStarted","Data":"2d01eb01701c6662f56ea20410d19e0aaf9e91fc2976092b3a1594dd9633ffb2"}
event={"ID":"7dc0a764-9aea-494f-b71e-eb0df5cf3d66","Type":"ContainerStarted","Data":"2d01eb01701c6662f56ea20410d19e0aaf9e91fc2976092b3a1594dd9633ffb2"} Dec 04 17:50:26 crc kubenswrapper[4631]: I1204 17:50:26.805823 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d6c6b7549-c7hqg" event={"ID":"7dc0a764-9aea-494f-b71e-eb0df5cf3d66","Type":"ContainerStarted","Data":"6c5284870e2efc1415c5e333de65c0056fce1e46496f09832bdf18810b17493c"} Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.001412 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.066603 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d57ad58-9c10-4424-84cd-8658526709fa-logs\") pod \"9d57ad58-9c10-4424-84cd-8658526709fa\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.066691 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d57ad58-9c10-4424-84cd-8658526709fa-combined-ca-bundle\") pod \"9d57ad58-9c10-4424-84cd-8658526709fa\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.066782 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d57ad58-9c10-4424-84cd-8658526709fa-config-data\") pod \"9d57ad58-9c10-4424-84cd-8658526709fa\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.067107 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ttnsf\" (UniqueName: \"kubernetes.io/projected/9d57ad58-9c10-4424-84cd-8658526709fa-kube-api-access-ttnsf\") pod \"9d57ad58-9c10-4424-84cd-8658526709fa\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.067124 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d57ad58-9c10-4424-84cd-8658526709fa-scripts\") pod \"9d57ad58-9c10-4424-84cd-8658526709fa\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.067177 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"9d57ad58-9c10-4424-84cd-8658526709fa\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.067207 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9d57ad58-9c10-4424-84cd-8658526709fa-httpd-run\") pod \"9d57ad58-9c10-4424-84cd-8658526709fa\" (UID: \"9d57ad58-9c10-4424-84cd-8658526709fa\") " Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.068050 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d57ad58-9c10-4424-84cd-8658526709fa-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "9d57ad58-9c10-4424-84cd-8658526709fa" (UID: "9d57ad58-9c10-4424-84cd-8658526709fa"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.068081 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d57ad58-9c10-4424-84cd-8658526709fa-logs" (OuterVolumeSpecName: "logs") pod "9d57ad58-9c10-4424-84cd-8658526709fa" (UID: "9d57ad58-9c10-4424-84cd-8658526709fa"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.082682 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d57ad58-9c10-4424-84cd-8658526709fa-kube-api-access-ttnsf" (OuterVolumeSpecName: "kube-api-access-ttnsf") pod "9d57ad58-9c10-4424-84cd-8658526709fa" (UID: "9d57ad58-9c10-4424-84cd-8658526709fa"). InnerVolumeSpecName "kube-api-access-ttnsf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.082779 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "9d57ad58-9c10-4424-84cd-8658526709fa" (UID: "9d57ad58-9c10-4424-84cd-8658526709fa"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.083798 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.084234 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d57ad58-9c10-4424-84cd-8658526709fa-scripts" (OuterVolumeSpecName: "scripts") pod "9d57ad58-9c10-4424-84cd-8658526709fa" (UID: "9d57ad58-9c10-4424-84cd-8658526709fa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.120689 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d57ad58-9c10-4424-84cd-8658526709fa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9d57ad58-9c10-4424-84cd-8658526709fa" (UID: "9d57ad58-9c10-4424-84cd-8658526709fa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.153765 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d57ad58-9c10-4424-84cd-8658526709fa-config-data" (OuterVolumeSpecName: "config-data") pod "9d57ad58-9c10-4424-84cd-8658526709fa" (UID: "9d57ad58-9c10-4424-84cd-8658526709fa"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.168749 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-scripts\") pod \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.168820 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-config-data\") pod \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.168845 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxr52\" (UniqueName: \"kubernetes.io/projected/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-kube-api-access-hxr52\") pod \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.168864 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-combined-ca-bundle\") pod \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.168976 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-httpd-run\") pod \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.169049 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-logs\") pod \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.169099 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\" (UID: \"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77\") " Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.169666 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a2fdc5b3-e844-49c5-b1ff-88ad16e57a77" (UID: "a2fdc5b3-e844-49c5-b1ff-88ad16e57a77"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.169857 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-logs" (OuterVolumeSpecName: "logs") pod "a2fdc5b3-e844-49c5-b1ff-88ad16e57a77" (UID: "a2fdc5b3-e844-49c5-b1ff-88ad16e57a77"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.170055 4631 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-logs\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.170073 4631 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d57ad58-9c10-4424-84cd-8658526709fa-logs\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.170083 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d57ad58-9c10-4424-84cd-8658526709fa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.170093 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d57ad58-9c10-4424-84cd-8658526709fa-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.170102 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ttnsf\" (UniqueName: \"kubernetes.io/projected/9d57ad58-9c10-4424-84cd-8658526709fa-kube-api-access-ttnsf\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.170111 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d57ad58-9c10-4424-84cd-8658526709fa-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.170130 4631 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.170141 4631 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9d57ad58-9c10-4424-84cd-8658526709fa-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.170150 4631 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.173624 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-scripts" (OuterVolumeSpecName: "scripts") pod "a2fdc5b3-e844-49c5-b1ff-88ad16e57a77" (UID: "a2fdc5b3-e844-49c5-b1ff-88ad16e57a77"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.175602 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-kube-api-access-hxr52" (OuterVolumeSpecName: "kube-api-access-hxr52") pod "a2fdc5b3-e844-49c5-b1ff-88ad16e57a77" (UID: "a2fdc5b3-e844-49c5-b1ff-88ad16e57a77"). InnerVolumeSpecName "kube-api-access-hxr52". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.205859 4631 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.206843 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "a2fdc5b3-e844-49c5-b1ff-88ad16e57a77" (UID: "a2fdc5b3-e844-49c5-b1ff-88ad16e57a77"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.213992 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a2fdc5b3-e844-49c5-b1ff-88ad16e57a77" (UID: "a2fdc5b3-e844-49c5-b1ff-88ad16e57a77"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.244689 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-config-data" (OuterVolumeSpecName: "config-data") pod "a2fdc5b3-e844-49c5-b1ff-88ad16e57a77" (UID: "a2fdc5b3-e844-49c5-b1ff-88ad16e57a77"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.271655 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.271684 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.271697 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxr52\" (UniqueName: \"kubernetes.io/projected/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-kube-api-access-hxr52\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.271706 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.271715 4631 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.271734 4631 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.291745 4631 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.396327 4631 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.819647 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9d57ad58-9c10-4424-84cd-8658526709fa","Type":"ContainerDied","Data":"feee2643918cd8081b0b083cc429c0cc1d3506a6d82432c723f6835e7849fff0"} Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.819712 4631 scope.go:117] "RemoveContainer" containerID="663d6e838a59d8906e9c42aec41fd708ec3adee9b4be74213d78bc8b5d58d551" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.819889 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.840500 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d6c6b7549-c7hqg" event={"ID":"7dc0a764-9aea-494f-b71e-eb0df5cf3d66","Type":"ContainerStarted","Data":"55ccbdadc5c3dd134b4067aaa29dfaec4033c811ff269de0b990580a28e7597c"} Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.840579 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.851364 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.866548 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a2fdc5b3-e844-49c5-b1ff-88ad16e57a77","Type":"ContainerDied","Data":"7d2044854dcee8d3005049a1e820b06d93967cacd2cd2023e52f6f35aa31b74d"} Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.873799 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.897757 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.919509 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 04 17:50:27 crc kubenswrapper[4631]: E1204 17:50:27.920160 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2fdc5b3-e844-49c5-b1ff-88ad16e57a77" containerName="glance-httpd" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.920180 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2fdc5b3-e844-49c5-b1ff-88ad16e57a77" containerName="glance-httpd" Dec 04 17:50:27 crc kubenswrapper[4631]: E1204 17:50:27.920192 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d57ad58-9c10-4424-84cd-8658526709fa" containerName="glance-log" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.920199 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d57ad58-9c10-4424-84cd-8658526709fa" containerName="glance-log" Dec 04 17:50:27 crc kubenswrapper[4631]: E1204 17:50:27.920239 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2fdc5b3-e844-49c5-b1ff-88ad16e57a77" containerName="glance-log" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.920248 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2fdc5b3-e844-49c5-b1ff-88ad16e57a77" containerName="glance-log" Dec 04 17:50:27 crc kubenswrapper[4631]: E1204 17:50:27.920276 4631 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="9d57ad58-9c10-4424-84cd-8658526709fa" containerName="glance-httpd" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.920303 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d57ad58-9c10-4424-84cd-8658526709fa" containerName="glance-httpd" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.920507 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d57ad58-9c10-4424-84cd-8658526709fa" containerName="glance-httpd" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.920524 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2fdc5b3-e844-49c5-b1ff-88ad16e57a77" containerName="glance-httpd" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.920554 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d57ad58-9c10-4424-84cd-8658526709fa" containerName="glance-log" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.920575 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2fdc5b3-e844-49c5-b1ff-88ad16e57a77" containerName="glance-log" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.921736 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.925116 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-ntp7m" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.926069 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.926180 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.927327 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6d6c6b7549-c7hqg" podStartSLOduration=2.92730993 podStartE2EDuration="2.92730993s" podCreationTimestamp="2025-12-04 17:50:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:50:27.89964038 +0000 UTC m=+1357.931882378" watchObservedRunningTime="2025-12-04 17:50:27.92730993 +0000 UTC m=+1357.959551928" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.927811 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.927976 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 04 17:50:27 crc kubenswrapper[4631]: I1204 17:50:27.986425 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.005437 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.008706 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgpzr\" (UniqueName: \"kubernetes.io/projected/cb490f12-7a53-4fbd-a994-1c0ae225f253-kube-api-access-kgpzr\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.008747 4631 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-config-data\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.008793 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.008818 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-scripts\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.008943 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.009016 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb490f12-7a53-4fbd-a994-1c0ae225f253-logs\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.009033 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cb490f12-7a53-4fbd-a994-1c0ae225f253-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.009059 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.014323 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.015876 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.020679 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.020945 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.100523 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.114466 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgpzr\" (UniqueName: \"kubernetes.io/projected/cb490f12-7a53-4fbd-a994-1c0ae225f253-kube-api-access-kgpzr\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.114513 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.114546 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f065ae96-43d1-4d55-824e-76922d68ddc9-logs\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.114569 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-config-data\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.114596 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f065ae96-43d1-4d55-824e-76922d68ddc9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.114636 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.114663 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-scripts\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.114711 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.114746 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.114777 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-scripts\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.114797 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c996k\" (UniqueName: \"kubernetes.io/projected/f065ae96-43d1-4d55-824e-76922d68ddc9-kube-api-access-c996k\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.114849 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.114879 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-config-data\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.114923 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb490f12-7a53-4fbd-a994-1c0ae225f253-logs\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.114947 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cb490f12-7a53-4fbd-a994-1c0ae225f253-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.114981 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.115220 4631 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.116985 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb490f12-7a53-4fbd-a994-1c0ae225f253-logs\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.118189 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cb490f12-7a53-4fbd-a994-1c0ae225f253-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.121891 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-scripts\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.156201 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.156699 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgpzr\" (UniqueName: \"kubernetes.io/projected/cb490f12-7a53-4fbd-a994-1c0ae225f253-kube-api-access-kgpzr\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.158962 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-config-data\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.195033 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.195626 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.216326 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-config-data\") pod \"glance-default-external-api-0\" (UID: 
\"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.216445 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.216485 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f065ae96-43d1-4d55-824e-76922d68ddc9-logs\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.216504 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f065ae96-43d1-4d55-824e-76922d68ddc9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.216584 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.216598 4631 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.216631 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-scripts\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.216655 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c996k\" (UniqueName: \"kubernetes.io/projected/f065ae96-43d1-4d55-824e-76922d68ddc9-kube-api-access-c996k\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.216711 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.223592 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " 
pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.224011 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f065ae96-43d1-4d55-824e-76922d68ddc9-logs\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.224273 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f065ae96-43d1-4d55-824e-76922d68ddc9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.226964 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.229114 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-scripts\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.288786 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.290459 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c996k\" (UniqueName: \"kubernetes.io/projected/f065ae96-43d1-4d55-824e-76922d68ddc9-kube-api-access-c996k\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.293545 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-config-data\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.361946 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d57ad58-9c10-4424-84cd-8658526709fa" path="/var/lib/kubelet/pods/9d57ad58-9c10-4424-84cd-8658526709fa/volumes" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.362679 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2fdc5b3-e844-49c5-b1ff-88ad16e57a77" path="/var/lib/kubelet/pods/a2fdc5b3-e844-49c5-b1ff-88ad16e57a77/volumes" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.381496 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " pod="openstack/glance-default-external-api-0" Dec 04 17:50:28 crc kubenswrapper[4631]: I1204 17:50:28.655583 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 04 17:50:29 crc kubenswrapper[4631]: I1204 17:50:29.728546 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv" Dec 04 17:50:29 crc kubenswrapper[4631]: I1204 17:50:29.809972 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-h4pr5"] Dec 04 17:50:29 crc kubenswrapper[4631]: I1204 17:50:29.810246 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" podUID="2b9c86a8-914f-4bd7-a3f8-7065e92a4b70" containerName="dnsmasq-dns" containerID="cri-o://7889725e9ccb0c9ca6981fcffba80a40b9dfbec70152cd541065295b0accb348" gracePeriod=10 Dec 04 17:50:30 crc kubenswrapper[4631]: I1204 17:50:30.877817 4631 generic.go:334] "Generic (PLEG): container finished" podID="2b9c86a8-914f-4bd7-a3f8-7065e92a4b70" containerID="7889725e9ccb0c9ca6981fcffba80a40b9dfbec70152cd541065295b0accb348" exitCode=0 Dec 04 17:50:30 crc kubenswrapper[4631]: I1204 17:50:30.877863 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" event={"ID":"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70","Type":"ContainerDied","Data":"7889725e9ccb0c9ca6981fcffba80a40b9dfbec70152cd541065295b0accb348"} Dec 04 17:50:31 crc kubenswrapper[4631]: I1204 17:50:31.889619 4631 generic.go:334] "Generic (PLEG): container finished" podID="3171d16d-db61-4d69-b9c7-262da016be91" containerID="e208b22924cd5eb32f1e398d016449503ea542b4d58964a3f358045a30c66cc1" exitCode=0 Dec 04 17:50:31 crc kubenswrapper[4631]: I1204 17:50:31.889658 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gq8bc" event={"ID":"3171d16d-db61-4d69-b9c7-262da016be91","Type":"ContainerDied","Data":"e208b22924cd5eb32f1e398d016449503ea542b4d58964a3f358045a30c66cc1"} Dec 04 17:50:32 crc kubenswrapper[4631]: I1204 17:50:32.874901 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:50:32 crc kubenswrapper[4631]: I1204 17:50:32.875933 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:50:33 crc kubenswrapper[4631]: I1204 17:50:33.000300 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:50:33 crc kubenswrapper[4631]: I1204 17:50:33.000350 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:50:33 crc kubenswrapper[4631]: I1204 17:50:33.989959 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" podUID="2b9c86a8-914f-4bd7-a3f8-7065e92a4b70" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.140:5353: connect: connection refused" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.425453 4631 scope.go:117] "RemoveContainer" containerID="0e63eeb1e79dc0261a52606dac45d75fa27b9210be5bd7b3be43789885ba9956" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.612013 4631 scope.go:117] "RemoveContainer" containerID="6a68f80c904f9db30afbf5a5d355499b75c15628724dce9f1dee881e8df3185f" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.663654 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-tf4br" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.665745 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-gq8bc" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.675350 4631 scope.go:117] "RemoveContainer" containerID="47168fe9b26aa94590ee97f585689e0ba044c08f8bf75e3882a5734ec3b3e732" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.752161 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.770674 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-logs\") pod \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\" (UID: \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\") " Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.770722 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-config-data\") pod \"3171d16d-db61-4d69-b9c7-262da016be91\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.770822 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-config-data\") pod \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\" (UID: \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\") " Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.770854 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-combined-ca-bundle\") pod \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\" (UID: \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\") " Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.770933 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4l49r\" (UniqueName: \"kubernetes.io/projected/3171d16d-db61-4d69-b9c7-262da016be91-kube-api-access-4l49r\") pod \"3171d16d-db61-4d69-b9c7-262da016be91\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.770959 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-scripts\") pod \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\" (UID: \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\") " Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.770983 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pkmh\" (UniqueName: \"kubernetes.io/projected/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-kube-api-access-5pkmh\") pod \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\" (UID: \"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f\") " Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.771053 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-fernet-keys\") pod \"3171d16d-db61-4d69-b9c7-262da016be91\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.771082 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-scripts\") pod \"3171d16d-db61-4d69-b9c7-262da016be91\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.773312 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-credential-keys\") pod \"3171d16d-db61-4d69-b9c7-262da016be91\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.773529 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-combined-ca-bundle\") pod \"3171d16d-db61-4d69-b9c7-262da016be91\" (UID: \"3171d16d-db61-4d69-b9c7-262da016be91\") " Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.775900 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-logs" (OuterVolumeSpecName: "logs") pod "c6763071-ba0b-4ef7-9843-9a4c66fe4a6f" (UID: "c6763071-ba0b-4ef7-9843-9a4c66fe4a6f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.798133 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3171d16d-db61-4d69-b9c7-262da016be91-kube-api-access-4l49r" (OuterVolumeSpecName: "kube-api-access-4l49r") pod "3171d16d-db61-4d69-b9c7-262da016be91" (UID: "3171d16d-db61-4d69-b9c7-262da016be91"). InnerVolumeSpecName "kube-api-access-4l49r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.807701 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "3171d16d-db61-4d69-b9c7-262da016be91" (UID: "3171d16d-db61-4d69-b9c7-262da016be91"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.807857 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-scripts" (OuterVolumeSpecName: "scripts") pod "c6763071-ba0b-4ef7-9843-9a4c66fe4a6f" (UID: "c6763071-ba0b-4ef7-9843-9a4c66fe4a6f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.810816 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-scripts" (OuterVolumeSpecName: "scripts") pod "3171d16d-db61-4d69-b9c7-262da016be91" (UID: "3171d16d-db61-4d69-b9c7-262da016be91"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.814566 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-kube-api-access-5pkmh" (OuterVolumeSpecName: "kube-api-access-5pkmh") pod "c6763071-ba0b-4ef7-9843-9a4c66fe4a6f" (UID: "c6763071-ba0b-4ef7-9843-9a4c66fe4a6f"). InnerVolumeSpecName "kube-api-access-5pkmh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.814647 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "3171d16d-db61-4d69-b9c7-262da016be91" (UID: "3171d16d-db61-4d69-b9c7-262da016be91"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.875965 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-ovsdbserver-nb\") pod \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.876028 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-dns-swift-storage-0\") pod \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.876068 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-config\") pod \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.876162 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-ovsdbserver-sb\") pod \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.876269 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-dns-svc\") pod \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.876322 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zlgh\" (UniqueName: \"kubernetes.io/projected/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-kube-api-access-2zlgh\") pod \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\" (UID: \"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70\") " Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.876809 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.876826 4631 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.876839 4631 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-logs\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.876851 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4l49r\" (UniqueName: 
\"kubernetes.io/projected/3171d16d-db61-4d69-b9c7-262da016be91-kube-api-access-4l49r\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.876862 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.876874 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pkmh\" (UniqueName: \"kubernetes.io/projected/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-kube-api-access-5pkmh\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.876884 4631 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.895586 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-kube-api-access-2zlgh" (OuterVolumeSpecName: "kube-api-access-2zlgh") pod "2b9c86a8-914f-4bd7-a3f8-7065e92a4b70" (UID: "2b9c86a8-914f-4bd7-a3f8-7065e92a4b70"). InnerVolumeSpecName "kube-api-access-2zlgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.941240 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" event={"ID":"2b9c86a8-914f-4bd7-a3f8-7065e92a4b70","Type":"ContainerDied","Data":"77901343db8cf582b4a0331644b225687c2b20e01627395d8a030df8f35749d2"} Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.941292 4631 scope.go:117] "RemoveContainer" containerID="7889725e9ccb0c9ca6981fcffba80a40b9dfbec70152cd541065295b0accb348" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.941354 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-h4pr5" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.944546 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-tf4br" event={"ID":"c6763071-ba0b-4ef7-9843-9a4c66fe4a6f","Type":"ContainerDied","Data":"16b7f0e5ea1df1b770a4698770f3ad4ee14fa01d9831e11036893577c954cad0"} Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.944577 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="16b7f0e5ea1df1b770a4698770f3ad4ee14fa01d9831e11036893577c954cad0" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.944629 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-tf4br" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.950442 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gq8bc" event={"ID":"3171d16d-db61-4d69-b9c7-262da016be91","Type":"ContainerDied","Data":"2d348f9f14aa3489f9cc26b8e9391b4cf555ed20668079e69b8f66e613e003ed"} Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.950486 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d348f9f14aa3489f9cc26b8e9391b4cf555ed20668079e69b8f66e613e003ed" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.950551 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-gq8bc" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.975629 4631 scope.go:117] "RemoveContainer" containerID="93d66d7230be80b7806ab9315d4f7686c911ae191d77588b16c7e39074be779d" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.976156 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3171d16d-db61-4d69-b9c7-262da016be91" (UID: "3171d16d-db61-4d69-b9c7-262da016be91"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.982575 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:35 crc kubenswrapper[4631]: I1204 17:50:35.982607 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zlgh\" (UniqueName: \"kubernetes.io/projected/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-kube-api-access-2zlgh\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.027259 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.027318 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.027388 4631 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.029216 4631 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"748b1412c888c95d08adbf71c3c971aef4060a8000682c7031f16f9f8ee657ac"} pod="openshift-machine-config-operator/machine-config-daemon-q27wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.029293 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" containerID="cri-o://748b1412c888c95d08adbf71c3c971aef4060a8000682c7031f16f9f8ee657ac" gracePeriod=600 Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.044747 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-config-data" (OuterVolumeSpecName: "config-data") pod "c6763071-ba0b-4ef7-9843-9a4c66fe4a6f" (UID: "c6763071-ba0b-4ef7-9843-9a4c66fe4a6f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.044987 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-config-data" (OuterVolumeSpecName: "config-data") pod "3171d16d-db61-4d69-b9c7-262da016be91" (UID: "3171d16d-db61-4d69-b9c7-262da016be91"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.067767 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c6763071-ba0b-4ef7-9843-9a4c66fe4a6f" (UID: "c6763071-ba0b-4ef7-9843-9a4c66fe4a6f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.084542 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.084562 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.084572 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3171d16d-db61-4d69-b9c7-262da016be91-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.126873 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.170087 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2b9c86a8-914f-4bd7-a3f8-7065e92a4b70" (UID: "2b9c86a8-914f-4bd7-a3f8-7065e92a4b70"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.190262 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.199279 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.217113 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-config" (OuterVolumeSpecName: "config") pod "2b9c86a8-914f-4bd7-a3f8-7065e92a4b70" (UID: "2b9c86a8-914f-4bd7-a3f8-7065e92a4b70"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.223157 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2b9c86a8-914f-4bd7-a3f8-7065e92a4b70" (UID: "2b9c86a8-914f-4bd7-a3f8-7065e92a4b70"). 
InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.229125 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2b9c86a8-914f-4bd7-a3f8-7065e92a4b70" (UID: "2b9c86a8-914f-4bd7-a3f8-7065e92a4b70"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.232748 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2b9c86a8-914f-4bd7-a3f8-7065e92a4b70" (UID: "2b9c86a8-914f-4bd7-a3f8-7065e92a4b70"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.291344 4631 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.291388 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.291399 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.291408 4631 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.328100 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-h4pr5"] Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.338738 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-h4pr5"] Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.856020 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-589bf6fb8-62vft"] Dec 04 17:50:36 crc kubenswrapper[4631]: E1204 17:50:36.856659 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b9c86a8-914f-4bd7-a3f8-7065e92a4b70" containerName="dnsmasq-dns" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.856671 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b9c86a8-914f-4bd7-a3f8-7065e92a4b70" containerName="dnsmasq-dns" Dec 04 17:50:36 crc kubenswrapper[4631]: E1204 17:50:36.856690 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b9c86a8-914f-4bd7-a3f8-7065e92a4b70" containerName="init" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.856696 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b9c86a8-914f-4bd7-a3f8-7065e92a4b70" containerName="init" Dec 04 17:50:36 crc kubenswrapper[4631]: E1204 17:50:36.856706 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6763071-ba0b-4ef7-9843-9a4c66fe4a6f" containerName="placement-db-sync" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.856712 4631 
state_mem.go:107] "Deleted CPUSet assignment" podUID="c6763071-ba0b-4ef7-9843-9a4c66fe4a6f" containerName="placement-db-sync" Dec 04 17:50:36 crc kubenswrapper[4631]: E1204 17:50:36.856733 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3171d16d-db61-4d69-b9c7-262da016be91" containerName="keystone-bootstrap" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.856739 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="3171d16d-db61-4d69-b9c7-262da016be91" containerName="keystone-bootstrap" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.856917 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6763071-ba0b-4ef7-9843-9a4c66fe4a6f" containerName="placement-db-sync" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.856932 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="3171d16d-db61-4d69-b9c7-262da016be91" containerName="keystone-bootstrap" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.856949 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b9c86a8-914f-4bd7-a3f8-7065e92a4b70" containerName="dnsmasq-dns" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.857480 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.862104 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.862123 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.868477 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.868495 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.868632 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-sxcqt" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.868739 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.879344 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-589bf6fb8-62vft"] Dec 04 17:50:36 crc kubenswrapper[4631]: I1204 17:50:36.992057 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cb490f12-7a53-4fbd-a994-1c0ae225f253","Type":"ContainerStarted","Data":"daad094b5838d642c4ef6d67b4c56d0415b9357c25735974b928d6c76d500cf6"} Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.015532 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-776f95766d-5qctj"] Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.017339 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.021691 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.021883 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-9gz46" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.022026 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.025555 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-scripts\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.025602 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-config-data\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.025621 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-internal-tls-certs\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.025644 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-fernet-keys\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.025675 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-public-tls-certs\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.025706 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5tjr\" (UniqueName: \"kubernetes.io/projected/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-kube-api-access-j5tjr\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.025751 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-credential-keys\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.025772 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-combined-ca-bundle\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.025915 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.031799 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-776f95766d-5qctj"] Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.033193 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.059648 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc7c63c8-d4b4-49fd-81a7-1720d58d2934","Type":"ContainerStarted","Data":"192511984f822b6322fa18239ccfb59efad1f3895c2fcffe5a0781b153a3b284"} Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.118944 4631 generic.go:334] "Generic (PLEG): container finished" podID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerID="748b1412c888c95d08adbf71c3c971aef4060a8000682c7031f16f9f8ee657ac" exitCode=0 Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.119014 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerDied","Data":"748b1412c888c95d08adbf71c3c971aef4060a8000682c7031f16f9f8ee657ac"} Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.119043 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerStarted","Data":"05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009"} Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.119061 4631 scope.go:117] "RemoveContainer" containerID="2b743632dd2ae2acbde167ee221ee6a9a0928b1076cfc48d3d7e9758476527f1" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.127870 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5tjr\" (UniqueName: \"kubernetes.io/projected/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-kube-api-access-j5tjr\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.127924 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b648789-3c38-485a-ad71-70566e8684fb-public-tls-certs\") pod \"placement-776f95766d-5qctj\" (UID: \"4b648789-3c38-485a-ad71-70566e8684fb\") " pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.127968 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b648789-3c38-485a-ad71-70566e8684fb-scripts\") pod \"placement-776f95766d-5qctj\" (UID: \"4b648789-3c38-485a-ad71-70566e8684fb\") " pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.128003 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-credential-keys\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.128022 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b648789-3c38-485a-ad71-70566e8684fb-logs\") pod \"placement-776f95766d-5qctj\" (UID: \"4b648789-3c38-485a-ad71-70566e8684fb\") " pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.128050 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-combined-ca-bundle\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.128073 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b648789-3c38-485a-ad71-70566e8684fb-combined-ca-bundle\") pod \"placement-776f95766d-5qctj\" (UID: \"4b648789-3c38-485a-ad71-70566e8684fb\") " pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.128123 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b648789-3c38-485a-ad71-70566e8684fb-config-data\") pod \"placement-776f95766d-5qctj\" (UID: \"4b648789-3c38-485a-ad71-70566e8684fb\") " pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.128181 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-scripts\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.128202 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7zpf\" (UniqueName: \"kubernetes.io/projected/4b648789-3c38-485a-ad71-70566e8684fb-kube-api-access-c7zpf\") pod \"placement-776f95766d-5qctj\" (UID: \"4b648789-3c38-485a-ad71-70566e8684fb\") " pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.128229 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b648789-3c38-485a-ad71-70566e8684fb-internal-tls-certs\") pod \"placement-776f95766d-5qctj\" (UID: \"4b648789-3c38-485a-ad71-70566e8684fb\") " pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.128262 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-config-data\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.128285 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-internal-tls-certs\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.128309 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-fernet-keys\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.128341 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-public-tls-certs\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.138000 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-credential-keys\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.146397 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-fernet-keys\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.146426 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-internal-tls-certs\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.154521 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-config-data\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.158344 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-scripts\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.158619 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-combined-ca-bundle\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.159974 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-public-tls-certs\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 
17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.166134 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jbcc2" event={"ID":"d0aff05c-75cd-495a-903e-83b72596bf86","Type":"ContainerStarted","Data":"ce6f763c15253453cfaa81b44591a8da15943491c7ec49411fff8ccb2918bdf8"} Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.170896 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f065ae96-43d1-4d55-824e-76922d68ddc9","Type":"ContainerStarted","Data":"4477f7849ec9809bf8ae543570e7f087b1c186c080c9d4f14a193495e37fd43d"} Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.171811 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5tjr\" (UniqueName: \"kubernetes.io/projected/138d8c39-c5e9-48bf-83b7-efc22bc3ec1e-kube-api-access-j5tjr\") pod \"keystone-589bf6fb8-62vft\" (UID: \"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e\") " pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.181015 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.199490 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-jbcc2" podStartSLOduration=3.669493076 podStartE2EDuration="54.199473123s" podCreationTimestamp="2025-12-04 17:49:43 +0000 UTC" firstStartedPulling="2025-12-04 17:49:45.041708321 +0000 UTC m=+1315.073950319" lastFinishedPulling="2025-12-04 17:50:35.571688368 +0000 UTC m=+1365.603930366" observedRunningTime="2025-12-04 17:50:37.198879286 +0000 UTC m=+1367.231121284" watchObservedRunningTime="2025-12-04 17:50:37.199473123 +0000 UTC m=+1367.231715121" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.230223 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b648789-3c38-485a-ad71-70566e8684fb-config-data\") pod \"placement-776f95766d-5qctj\" (UID: \"4b648789-3c38-485a-ad71-70566e8684fb\") " pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.230355 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7zpf\" (UniqueName: \"kubernetes.io/projected/4b648789-3c38-485a-ad71-70566e8684fb-kube-api-access-c7zpf\") pod \"placement-776f95766d-5qctj\" (UID: \"4b648789-3c38-485a-ad71-70566e8684fb\") " pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.230826 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b648789-3c38-485a-ad71-70566e8684fb-internal-tls-certs\") pod \"placement-776f95766d-5qctj\" (UID: \"4b648789-3c38-485a-ad71-70566e8684fb\") " pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.231968 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b648789-3c38-485a-ad71-70566e8684fb-public-tls-certs\") pod \"placement-776f95766d-5qctj\" (UID: \"4b648789-3c38-485a-ad71-70566e8684fb\") " pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.232090 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/4b648789-3c38-485a-ad71-70566e8684fb-scripts\") pod \"placement-776f95766d-5qctj\" (UID: \"4b648789-3c38-485a-ad71-70566e8684fb\") " pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.232140 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b648789-3c38-485a-ad71-70566e8684fb-logs\") pod \"placement-776f95766d-5qctj\" (UID: \"4b648789-3c38-485a-ad71-70566e8684fb\") " pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.232188 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b648789-3c38-485a-ad71-70566e8684fb-combined-ca-bundle\") pod \"placement-776f95766d-5qctj\" (UID: \"4b648789-3c38-485a-ad71-70566e8684fb\") " pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.236438 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b648789-3c38-485a-ad71-70566e8684fb-internal-tls-certs\") pod \"placement-776f95766d-5qctj\" (UID: \"4b648789-3c38-485a-ad71-70566e8684fb\") " pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.236657 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b648789-3c38-485a-ad71-70566e8684fb-logs\") pod \"placement-776f95766d-5qctj\" (UID: \"4b648789-3c38-485a-ad71-70566e8684fb\") " pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.238105 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b648789-3c38-485a-ad71-70566e8684fb-public-tls-certs\") pod \"placement-776f95766d-5qctj\" (UID: \"4b648789-3c38-485a-ad71-70566e8684fb\") " pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.238852 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b648789-3c38-485a-ad71-70566e8684fb-combined-ca-bundle\") pod \"placement-776f95766d-5qctj\" (UID: \"4b648789-3c38-485a-ad71-70566e8684fb\") " pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.239785 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b648789-3c38-485a-ad71-70566e8684fb-config-data\") pod \"placement-776f95766d-5qctj\" (UID: \"4b648789-3c38-485a-ad71-70566e8684fb\") " pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.242497 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b648789-3c38-485a-ad71-70566e8684fb-scripts\") pod \"placement-776f95766d-5qctj\" (UID: \"4b648789-3c38-485a-ad71-70566e8684fb\") " pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.246848 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7zpf\" (UniqueName: \"kubernetes.io/projected/4b648789-3c38-485a-ad71-70566e8684fb-kube-api-access-c7zpf\") pod \"placement-776f95766d-5qctj\" (UID: \"4b648789-3c38-485a-ad71-70566e8684fb\") " 
pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:37 crc kubenswrapper[4631]: I1204 17:50:37.369735 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:38 crc kubenswrapper[4631]: I1204 17:50:38.182775 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-49bhb" event={"ID":"caa9015d-d530-4caa-8a24-2338d69519a3","Type":"ContainerStarted","Data":"a3ecb284e0b4f23844c2fcacf3a2193c79bcf9be9efd4e20429073bd95d4a669"} Dec 04 17:50:38 crc kubenswrapper[4631]: I1204 17:50:38.290224 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b9c86a8-914f-4bd7-a3f8-7065e92a4b70" path="/var/lib/kubelet/pods/2b9c86a8-914f-4bd7-a3f8-7065e92a4b70/volumes" Dec 04 17:50:38 crc kubenswrapper[4631]: I1204 17:50:38.291179 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-776f95766d-5qctj"] Dec 04 17:50:38 crc kubenswrapper[4631]: I1204 17:50:38.400826 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-589bf6fb8-62vft"] Dec 04 17:50:39 crc kubenswrapper[4631]: I1204 17:50:39.226451 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-589bf6fb8-62vft" event={"ID":"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e","Type":"ContainerStarted","Data":"4f6bcdc904d92861c78500cdb4507aa1b126d7c3b88e684b54a761ef2253f246"} Dec 04 17:50:39 crc kubenswrapper[4631]: I1204 17:50:39.227124 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-589bf6fb8-62vft" event={"ID":"138d8c39-c5e9-48bf-83b7-efc22bc3ec1e","Type":"ContainerStarted","Data":"4b5f844c85b014c776daf2801edb3c8e516d66a29c48bbf35937bfc7b3ede298"} Dec 04 17:50:39 crc kubenswrapper[4631]: I1204 17:50:39.227153 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-589bf6fb8-62vft" Dec 04 17:50:39 crc kubenswrapper[4631]: I1204 17:50:39.232836 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f065ae96-43d1-4d55-824e-76922d68ddc9","Type":"ContainerStarted","Data":"6cd0ebbd21921f22396c0a9558824a76a6b41a1003b0bf57d4fecb86a249ce50"} Dec 04 17:50:39 crc kubenswrapper[4631]: I1204 17:50:39.234571 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cb490f12-7a53-4fbd-a994-1c0ae225f253","Type":"ContainerStarted","Data":"bb62f1d046e85e252f53059ea99a0f32bee50f63379d75f37462c755d4e11913"} Dec 04 17:50:39 crc kubenswrapper[4631]: I1204 17:50:39.238051 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-776f95766d-5qctj" event={"ID":"4b648789-3c38-485a-ad71-70566e8684fb","Type":"ContainerStarted","Data":"6dbf60723dd67e6d9ec875c43fc9636beec7130bcd3be168ec24114ec254edce"} Dec 04 17:50:39 crc kubenswrapper[4631]: I1204 17:50:39.238077 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-776f95766d-5qctj" event={"ID":"4b648789-3c38-485a-ad71-70566e8684fb","Type":"ContainerStarted","Data":"beb41324f288bee195bcb1921be5e95a008ac3c366fedb55b7f4a96fd329c6d1"} Dec 04 17:50:39 crc kubenswrapper[4631]: I1204 17:50:39.238090 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:39 crc kubenswrapper[4631]: I1204 17:50:39.238098 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-776f95766d-5qctj" 
event={"ID":"4b648789-3c38-485a-ad71-70566e8684fb","Type":"ContainerStarted","Data":"32a1d0c55da99d9462a1403cb398a331d742b7489a5f4edc063938db4cc63824"} Dec 04 17:50:39 crc kubenswrapper[4631]: I1204 17:50:39.238117 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-776f95766d-5qctj" Dec 04 17:50:39 crc kubenswrapper[4631]: I1204 17:50:39.249956 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-589bf6fb8-62vft" podStartSLOduration=3.249937857 podStartE2EDuration="3.249937857s" podCreationTimestamp="2025-12-04 17:50:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:50:39.240267041 +0000 UTC m=+1369.272509039" watchObservedRunningTime="2025-12-04 17:50:39.249937857 +0000 UTC m=+1369.282179855" Dec 04 17:50:39 crc kubenswrapper[4631]: I1204 17:50:39.266529 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-49bhb" podStartSLOduration=5.165010583 podStartE2EDuration="56.26649381s" podCreationTimestamp="2025-12-04 17:49:43 +0000 UTC" firstStartedPulling="2025-12-04 17:49:44.510541733 +0000 UTC m=+1314.542783731" lastFinishedPulling="2025-12-04 17:50:35.61202496 +0000 UTC m=+1365.644266958" observedRunningTime="2025-12-04 17:50:39.25950347 +0000 UTC m=+1369.291745468" watchObservedRunningTime="2025-12-04 17:50:39.26649381 +0000 UTC m=+1369.298735808" Dec 04 17:50:39 crc kubenswrapper[4631]: I1204 17:50:39.295062 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-776f95766d-5qctj" podStartSLOduration=3.295041695 podStartE2EDuration="3.295041695s" podCreationTimestamp="2025-12-04 17:50:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:50:39.282461406 +0000 UTC m=+1369.314703404" watchObservedRunningTime="2025-12-04 17:50:39.295041695 +0000 UTC m=+1369.327283703" Dec 04 17:50:42 crc kubenswrapper[4631]: I1204 17:50:42.289993 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f065ae96-43d1-4d55-824e-76922d68ddc9","Type":"ContainerStarted","Data":"1540969c51c3402e1f5a9a53302b64eaba6daf28ffa8dd622ae4637919a90d1e"} Dec 04 17:50:42 crc kubenswrapper[4631]: I1204 17:50:42.295831 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cb490f12-7a53-4fbd-a994-1c0ae225f253","Type":"ContainerStarted","Data":"711e5e47379a65f306ba54cfebf210735b505aacea97f4f8139a420869205675"} Dec 04 17:50:42 crc kubenswrapper[4631]: I1204 17:50:42.329297 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=15.329279222 podStartE2EDuration="15.329279222s" podCreationTimestamp="2025-12-04 17:50:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:50:42.322322904 +0000 UTC m=+1372.354564922" watchObservedRunningTime="2025-12-04 17:50:42.329279222 +0000 UTC m=+1372.361521220" Dec 04 17:50:42 crc kubenswrapper[4631]: I1204 17:50:42.351240 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=15.351220829 podStartE2EDuration="15.351220829s" 
podCreationTimestamp="2025-12-04 17:50:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:50:42.346488734 +0000 UTC m=+1372.378730742" watchObservedRunningTime="2025-12-04 17:50:42.351220829 +0000 UTC m=+1372.383462827" Dec 04 17:50:42 crc kubenswrapper[4631]: I1204 17:50:42.875923 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7b99dd8d64-9nrvl" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.143:8443: connect: connection refused" Dec 04 17:50:43 crc kubenswrapper[4631]: I1204 17:50:43.001973 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-77d5fd455b-8kwkp" podUID="78aafb4d-470c-477d-bfe6-5b7a29b79fc0" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Dec 04 17:50:48 crc kubenswrapper[4631]: I1204 17:50:48.289497 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 04 17:50:48 crc kubenswrapper[4631]: I1204 17:50:48.290149 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 04 17:50:48 crc kubenswrapper[4631]: I1204 17:50:48.348613 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 04 17:50:48 crc kubenswrapper[4631]: I1204 17:50:48.350011 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 04 17:50:48 crc kubenswrapper[4631]: I1204 17:50:48.350630 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 04 17:50:48 crc kubenswrapper[4631]: I1204 17:50:48.656200 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 04 17:50:48 crc kubenswrapper[4631]: I1204 17:50:48.656551 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 04 17:50:48 crc kubenswrapper[4631]: I1204 17:50:48.702245 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 04 17:50:48 crc kubenswrapper[4631]: I1204 17:50:48.708635 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 04 17:50:49 crc kubenswrapper[4631]: I1204 17:50:49.367040 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 04 17:50:49 crc kubenswrapper[4631]: I1204 17:50:49.367079 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 04 17:50:49 crc kubenswrapper[4631]: I1204 17:50:49.367245 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 04 17:50:49 crc kubenswrapper[4631]: I1204 17:50:49.501762 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-55d98d789b-mxmt5" Dec 04 17:50:49 crc kubenswrapper[4631]: E1204 17:50:49.700397 4631 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="fc7c63c8-d4b4-49fd-81a7-1720d58d2934" Dec 04 17:50:50 crc kubenswrapper[4631]: I1204 17:50:50.375577 4631 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 17:50:50 crc kubenswrapper[4631]: I1204 17:50:50.375632 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc7c63c8-d4b4-49fd-81a7-1720d58d2934","Type":"ContainerStarted","Data":"0ad11945b92fb9a7c2a3e91ceb699610be49d81d237194af65371838484db3b7"} Dec 04 17:50:50 crc kubenswrapper[4631]: I1204 17:50:50.375984 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fc7c63c8-d4b4-49fd-81a7-1720d58d2934" containerName="ceilometer-notification-agent" containerID="cri-o://4109a6077f4a790a013d97cbb7037357e84f36997e835618b3d47f18252e6c46" gracePeriod=30 Dec 04 17:50:50 crc kubenswrapper[4631]: I1204 17:50:50.376093 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fc7c63c8-d4b4-49fd-81a7-1720d58d2934" containerName="proxy-httpd" containerID="cri-o://0ad11945b92fb9a7c2a3e91ceb699610be49d81d237194af65371838484db3b7" gracePeriod=30 Dec 04 17:50:50 crc kubenswrapper[4631]: I1204 17:50:50.376131 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fc7c63c8-d4b4-49fd-81a7-1720d58d2934" containerName="sg-core" containerID="cri-o://192511984f822b6322fa18239ccfb59efad1f3895c2fcffe5a0781b153a3b284" gracePeriod=30 Dec 04 17:50:51 crc kubenswrapper[4631]: I1204 17:50:51.390685 4631 generic.go:334] "Generic (PLEG): container finished" podID="fc7c63c8-d4b4-49fd-81a7-1720d58d2934" containerID="0ad11945b92fb9a7c2a3e91ceb699610be49d81d237194af65371838484db3b7" exitCode=0 Dec 04 17:50:51 crc kubenswrapper[4631]: I1204 17:50:51.390998 4631 generic.go:334] "Generic (PLEG): container finished" podID="fc7c63c8-d4b4-49fd-81a7-1720d58d2934" containerID="192511984f822b6322fa18239ccfb59efad1f3895c2fcffe5a0781b153a3b284" exitCode=2 Dec 04 17:50:51 crc kubenswrapper[4631]: I1204 17:50:51.391071 4631 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 17:50:51 crc kubenswrapper[4631]: I1204 17:50:51.391086 4631 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 17:50:51 crc kubenswrapper[4631]: I1204 17:50:51.390753 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc7c63c8-d4b4-49fd-81a7-1720d58d2934","Type":"ContainerDied","Data":"0ad11945b92fb9a7c2a3e91ceb699610be49d81d237194af65371838484db3b7"} Dec 04 17:50:51 crc kubenswrapper[4631]: I1204 17:50:51.391385 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc7c63c8-d4b4-49fd-81a7-1720d58d2934","Type":"ContainerDied","Data":"192511984f822b6322fa18239ccfb59efad1f3895c2fcffe5a0781b153a3b284"} Dec 04 17:50:51 crc kubenswrapper[4631]: I1204 17:50:51.391490 4631 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 17:50:51 crc kubenswrapper[4631]: I1204 17:50:51.391500 4631 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 17:50:52 crc kubenswrapper[4631]: I1204 17:50:52.406608 4631 generic.go:334] "Generic (PLEG): container finished" 
podID="d0aff05c-75cd-495a-903e-83b72596bf86" containerID="ce6f763c15253453cfaa81b44591a8da15943491c7ec49411fff8ccb2918bdf8" exitCode=0 Dec 04 17:50:52 crc kubenswrapper[4631]: I1204 17:50:52.406726 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jbcc2" event={"ID":"d0aff05c-75cd-495a-903e-83b72596bf86","Type":"ContainerDied","Data":"ce6f763c15253453cfaa81b44591a8da15943491c7ec49411fff8ccb2918bdf8"} Dec 04 17:50:52 crc kubenswrapper[4631]: I1204 17:50:52.535438 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 04 17:50:52 crc kubenswrapper[4631]: I1204 17:50:52.535568 4631 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 17:50:52 crc kubenswrapper[4631]: I1204 17:50:52.563645 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 04 17:50:52 crc kubenswrapper[4631]: I1204 17:50:52.563842 4631 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 04 17:50:52 crc kubenswrapper[4631]: I1204 17:50:52.564341 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 04 17:50:52 crc kubenswrapper[4631]: I1204 17:50:52.712057 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 04 17:50:52 crc kubenswrapper[4631]: I1204 17:50:52.875755 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7b99dd8d64-9nrvl" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.143:8443: connect: connection refused" Dec 04 17:50:53 crc kubenswrapper[4631]: I1204 17:50:53.000913 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-77d5fd455b-8kwkp" podUID="78aafb4d-470c-477d-bfe6-5b7a29b79fc0" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Dec 04 17:50:53 crc kubenswrapper[4631]: I1204 17:50:53.415498 4631 generic.go:334] "Generic (PLEG): container finished" podID="caa9015d-d530-4caa-8a24-2338d69519a3" containerID="a3ecb284e0b4f23844c2fcacf3a2193c79bcf9be9efd4e20429073bd95d4a669" exitCode=0 Dec 04 17:50:53 crc kubenswrapper[4631]: I1204 17:50:53.416286 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-49bhb" event={"ID":"caa9015d-d530-4caa-8a24-2338d69519a3","Type":"ContainerDied","Data":"a3ecb284e0b4f23844c2fcacf3a2193c79bcf9be9efd4e20429073bd95d4a669"} Dec 04 17:50:53 crc kubenswrapper[4631]: I1204 17:50:53.807268 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-jbcc2" Dec 04 17:50:53 crc kubenswrapper[4631]: I1204 17:50:53.884183 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0aff05c-75cd-495a-903e-83b72596bf86-combined-ca-bundle\") pod \"d0aff05c-75cd-495a-903e-83b72596bf86\" (UID: \"d0aff05c-75cd-495a-903e-83b72596bf86\") " Dec 04 17:50:53 crc kubenswrapper[4631]: I1204 17:50:53.884333 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dt5fr\" (UniqueName: \"kubernetes.io/projected/d0aff05c-75cd-495a-903e-83b72596bf86-kube-api-access-dt5fr\") pod \"d0aff05c-75cd-495a-903e-83b72596bf86\" (UID: \"d0aff05c-75cd-495a-903e-83b72596bf86\") " Dec 04 17:50:53 crc kubenswrapper[4631]: I1204 17:50:53.884436 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d0aff05c-75cd-495a-903e-83b72596bf86-db-sync-config-data\") pod \"d0aff05c-75cd-495a-903e-83b72596bf86\" (UID: \"d0aff05c-75cd-495a-903e-83b72596bf86\") " Dec 04 17:50:53 crc kubenswrapper[4631]: I1204 17:50:53.890782 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0aff05c-75cd-495a-903e-83b72596bf86-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "d0aff05c-75cd-495a-903e-83b72596bf86" (UID: "d0aff05c-75cd-495a-903e-83b72596bf86"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:53 crc kubenswrapper[4631]: I1204 17:50:53.901529 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0aff05c-75cd-495a-903e-83b72596bf86-kube-api-access-dt5fr" (OuterVolumeSpecName: "kube-api-access-dt5fr") pod "d0aff05c-75cd-495a-903e-83b72596bf86" (UID: "d0aff05c-75cd-495a-903e-83b72596bf86"). InnerVolumeSpecName "kube-api-access-dt5fr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:50:53 crc kubenswrapper[4631]: I1204 17:50:53.924594 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0aff05c-75cd-495a-903e-83b72596bf86-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d0aff05c-75cd-495a-903e-83b72596bf86" (UID: "d0aff05c-75cd-495a-903e-83b72596bf86"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:53 crc kubenswrapper[4631]: I1204 17:50:53.986414 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0aff05c-75cd-495a-903e-83b72596bf86-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:53 crc kubenswrapper[4631]: I1204 17:50:53.986448 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dt5fr\" (UniqueName: \"kubernetes.io/projected/d0aff05c-75cd-495a-903e-83b72596bf86-kube-api-access-dt5fr\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:53 crc kubenswrapper[4631]: I1204 17:50:53.986461 4631 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d0aff05c-75cd-495a-903e-83b72596bf86-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.457578 4631 generic.go:334] "Generic (PLEG): container finished" podID="fc7c63c8-d4b4-49fd-81a7-1720d58d2934" containerID="4109a6077f4a790a013d97cbb7037357e84f36997e835618b3d47f18252e6c46" exitCode=0 Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.457857 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc7c63c8-d4b4-49fd-81a7-1720d58d2934","Type":"ContainerDied","Data":"4109a6077f4a790a013d97cbb7037357e84f36997e835618b3d47f18252e6c46"} Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.474199 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-jbcc2" Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.475432 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jbcc2" event={"ID":"d0aff05c-75cd-495a-903e-83b72596bf86","Type":"ContainerDied","Data":"1ca03bb7dfdd5e9db1464866a5d873ae88bcec7bc54dbab686759d06205e6add"} Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.475467 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ca03bb7dfdd5e9db1464866a5d873ae88bcec7bc54dbab686759d06205e6add" Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.579727 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.737356 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8xbdd\" (UniqueName: \"kubernetes.io/projected/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-kube-api-access-8xbdd\") pod \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.738397 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-sg-core-conf-yaml\") pod \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.738490 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-log-httpd\") pod \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.738611 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-combined-ca-bundle\") pod \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.738718 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-scripts\") pod \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.738856 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-run-httpd\") pod \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.739005 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "fc7c63c8-d4b4-49fd-81a7-1720d58d2934" (UID: "fc7c63c8-d4b4-49fd-81a7-1720d58d2934"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.739300 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "fc7c63c8-d4b4-49fd-81a7-1720d58d2934" (UID: "fc7c63c8-d4b4-49fd-81a7-1720d58d2934"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.739417 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-config-data\") pod \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\" (UID: \"fc7c63c8-d4b4-49fd-81a7-1720d58d2934\") " Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.744388 4631 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.748184 4631 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.748693 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-kube-api-access-8xbdd" (OuterVolumeSpecName: "kube-api-access-8xbdd") pod "fc7c63c8-d4b4-49fd-81a7-1720d58d2934" (UID: "fc7c63c8-d4b4-49fd-81a7-1720d58d2934"). InnerVolumeSpecName "kube-api-access-8xbdd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.758572 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-scripts" (OuterVolumeSpecName: "scripts") pod "fc7c63c8-d4b4-49fd-81a7-1720d58d2934" (UID: "fc7c63c8-d4b4-49fd-81a7-1720d58d2934"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.810355 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "fc7c63c8-d4b4-49fd-81a7-1720d58d2934" (UID: "fc7c63c8-d4b4-49fd-81a7-1720d58d2934"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.877021 4631 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.877065 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:54 crc kubenswrapper[4631]: I1204 17:50:54.877075 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8xbdd\" (UniqueName: \"kubernetes.io/projected/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-kube-api-access-8xbdd\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.002410 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-75cd87c688-xlr7b"] Dec 04 17:50:55 crc kubenswrapper[4631]: E1204 17:50:55.004427 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc7c63c8-d4b4-49fd-81a7-1720d58d2934" containerName="proxy-httpd" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.004455 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc7c63c8-d4b4-49fd-81a7-1720d58d2934" containerName="proxy-httpd" Dec 04 17:50:55 crc kubenswrapper[4631]: E1204 17:50:55.004482 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc7c63c8-d4b4-49fd-81a7-1720d58d2934" containerName="sg-core" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.004488 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc7c63c8-d4b4-49fd-81a7-1720d58d2934" containerName="sg-core" Dec 04 17:50:55 crc kubenswrapper[4631]: E1204 17:50:55.004512 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0aff05c-75cd-495a-903e-83b72596bf86" containerName="barbican-db-sync" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.004520 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0aff05c-75cd-495a-903e-83b72596bf86" containerName="barbican-db-sync" Dec 04 17:50:55 crc kubenswrapper[4631]: E1204 17:50:55.004557 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc7c63c8-d4b4-49fd-81a7-1720d58d2934" containerName="ceilometer-notification-agent" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.004563 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc7c63c8-d4b4-49fd-81a7-1720d58d2934" containerName="ceilometer-notification-agent" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.004863 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc7c63c8-d4b4-49fd-81a7-1720d58d2934" containerName="proxy-httpd" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.004879 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0aff05c-75cd-495a-903e-83b72596bf86" containerName="barbican-db-sync" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.004900 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc7c63c8-d4b4-49fd-81a7-1720d58d2934" containerName="sg-core" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.004921 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc7c63c8-d4b4-49fd-81a7-1720d58d2934" containerName="ceilometer-notification-agent" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.055513 4631 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/barbican-keystone-listener-75cd87c688-xlr7b" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.071269 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-49bhb" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.092566 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5516312-5bde-4c7d-8910-bf75f2a98812-combined-ca-bundle\") pod \"barbican-keystone-listener-75cd87c688-xlr7b\" (UID: \"b5516312-5bde-4c7d-8910-bf75f2a98812\") " pod="openstack/barbican-keystone-listener-75cd87c688-xlr7b" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.092619 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrxvm\" (UniqueName: \"kubernetes.io/projected/b5516312-5bde-4c7d-8910-bf75f2a98812-kube-api-access-mrxvm\") pod \"barbican-keystone-listener-75cd87c688-xlr7b\" (UID: \"b5516312-5bde-4c7d-8910-bf75f2a98812\") " pod="openstack/barbican-keystone-listener-75cd87c688-xlr7b" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.092671 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5516312-5bde-4c7d-8910-bf75f2a98812-logs\") pod \"barbican-keystone-listener-75cd87c688-xlr7b\" (UID: \"b5516312-5bde-4c7d-8910-bf75f2a98812\") " pod="openstack/barbican-keystone-listener-75cd87c688-xlr7b" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.092702 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b5516312-5bde-4c7d-8910-bf75f2a98812-config-data-custom\") pod \"barbican-keystone-listener-75cd87c688-xlr7b\" (UID: \"b5516312-5bde-4c7d-8910-bf75f2a98812\") " pod="openstack/barbican-keystone-listener-75cd87c688-xlr7b" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.092724 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5516312-5bde-4c7d-8910-bf75f2a98812-config-data\") pod \"barbican-keystone-listener-75cd87c688-xlr7b\" (UID: \"b5516312-5bde-4c7d-8910-bf75f2a98812\") " pod="openstack/barbican-keystone-listener-75cd87c688-xlr7b" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.093162 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-8nlq9" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.104564 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.104823 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.105485 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-config-data" (OuterVolumeSpecName: "config-data") pod "fc7c63c8-d4b4-49fd-81a7-1720d58d2934" (UID: "fc7c63c8-d4b4-49fd-81a7-1720d58d2934"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.117511 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-75cd87c688-xlr7b"] Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.128717 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-f9b7c48cf-xdj7r"] Dec 04 17:50:55 crc kubenswrapper[4631]: E1204 17:50:55.129073 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="caa9015d-d530-4caa-8a24-2338d69519a3" containerName="cinder-db-sync" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.129085 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="caa9015d-d530-4caa-8a24-2338d69519a3" containerName="cinder-db-sync" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.129242 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="caa9015d-d530-4caa-8a24-2338d69519a3" containerName="cinder-db-sync" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.130276 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-f9b7c48cf-xdj7r" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.139467 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-f9b7c48cf-xdj7r"] Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.142081 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fc7c63c8-d4b4-49fd-81a7-1720d58d2934" (UID: "fc7c63c8-d4b4-49fd-81a7-1720d58d2934"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.161093 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.161259 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-n45p2"] Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.165673 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.194995 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-combined-ca-bundle\") pod \"caa9015d-d530-4caa-8a24-2338d69519a3\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.195111 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqkdh\" (UniqueName: \"kubernetes.io/projected/caa9015d-d530-4caa-8a24-2338d69519a3-kube-api-access-hqkdh\") pod \"caa9015d-d530-4caa-8a24-2338d69519a3\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.195150 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-db-sync-config-data\") pod \"caa9015d-d530-4caa-8a24-2338d69519a3\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.195189 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/caa9015d-d530-4caa-8a24-2338d69519a3-etc-machine-id\") pod \"caa9015d-d530-4caa-8a24-2338d69519a3\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.195248 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-scripts\") pod \"caa9015d-d530-4caa-8a24-2338d69519a3\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.195274 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-config-data\") pod \"caa9015d-d530-4caa-8a24-2338d69519a3\" (UID: \"caa9015d-d530-4caa-8a24-2338d69519a3\") " Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.195526 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-n45p2\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.195593 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1de91a80-bddc-4f80-bf05-0d1aba161730-config-data\") pod \"barbican-worker-f9b7c48cf-xdj7r\" (UID: \"1de91a80-bddc-4f80-bf05-0d1aba161730\") " pod="openstack/barbican-worker-f9b7c48cf-xdj7r" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.195613 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1de91a80-bddc-4f80-bf05-0d1aba161730-combined-ca-bundle\") pod \"barbican-worker-f9b7c48cf-xdj7r\" (UID: \"1de91a80-bddc-4f80-bf05-0d1aba161730\") " pod="openstack/barbican-worker-f9b7c48cf-xdj7r" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.195655 4631 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-n45p2\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.195685 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5516312-5bde-4c7d-8910-bf75f2a98812-combined-ca-bundle\") pod \"barbican-keystone-listener-75cd87c688-xlr7b\" (UID: \"b5516312-5bde-4c7d-8910-bf75f2a98812\") " pod="openstack/barbican-keystone-listener-75cd87c688-xlr7b" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.195700 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8vqw\" (UniqueName: \"kubernetes.io/projected/30909de0-7786-4946-a607-95b9b6e25832-kube-api-access-x8vqw\") pod \"dnsmasq-dns-75c8ddd69c-n45p2\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.195754 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrxvm\" (UniqueName: \"kubernetes.io/projected/b5516312-5bde-4c7d-8910-bf75f2a98812-kube-api-access-mrxvm\") pod \"barbican-keystone-listener-75cd87c688-xlr7b\" (UID: \"b5516312-5bde-4c7d-8910-bf75f2a98812\") " pod="openstack/barbican-keystone-listener-75cd87c688-xlr7b" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.195777 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-n45p2\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.195811 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1de91a80-bddc-4f80-bf05-0d1aba161730-config-data-custom\") pod \"barbican-worker-f9b7c48cf-xdj7r\" (UID: \"1de91a80-bddc-4f80-bf05-0d1aba161730\") " pod="openstack/barbican-worker-f9b7c48cf-xdj7r" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.195831 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1de91a80-bddc-4f80-bf05-0d1aba161730-logs\") pod \"barbican-worker-f9b7c48cf-xdj7r\" (UID: \"1de91a80-bddc-4f80-bf05-0d1aba161730\") " pod="openstack/barbican-worker-f9b7c48cf-xdj7r" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.195850 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-n45p2\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.195884 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-config\") pod \"dnsmasq-dns-75c8ddd69c-n45p2\" (UID: 
\"30909de0-7786-4946-a607-95b9b6e25832\") " pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.195903 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w696t\" (UniqueName: \"kubernetes.io/projected/1de91a80-bddc-4f80-bf05-0d1aba161730-kube-api-access-w696t\") pod \"barbican-worker-f9b7c48cf-xdj7r\" (UID: \"1de91a80-bddc-4f80-bf05-0d1aba161730\") " pod="openstack/barbican-worker-f9b7c48cf-xdj7r" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.195934 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5516312-5bde-4c7d-8910-bf75f2a98812-logs\") pod \"barbican-keystone-listener-75cd87c688-xlr7b\" (UID: \"b5516312-5bde-4c7d-8910-bf75f2a98812\") " pod="openstack/barbican-keystone-listener-75cd87c688-xlr7b" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.195984 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b5516312-5bde-4c7d-8910-bf75f2a98812-config-data-custom\") pod \"barbican-keystone-listener-75cd87c688-xlr7b\" (UID: \"b5516312-5bde-4c7d-8910-bf75f2a98812\") " pod="openstack/barbican-keystone-listener-75cd87c688-xlr7b" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.196007 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5516312-5bde-4c7d-8910-bf75f2a98812-config-data\") pod \"barbican-keystone-listener-75cd87c688-xlr7b\" (UID: \"b5516312-5bde-4c7d-8910-bf75f2a98812\") " pod="openstack/barbican-keystone-listener-75cd87c688-xlr7b" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.196082 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.196096 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc7c63c8-d4b4-49fd-81a7-1720d58d2934-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.205750 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/caa9015d-d530-4caa-8a24-2338d69519a3-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "caa9015d-d530-4caa-8a24-2338d69519a3" (UID: "caa9015d-d530-4caa-8a24-2338d69519a3"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.211175 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5516312-5bde-4c7d-8910-bf75f2a98812-logs\") pod \"barbican-keystone-listener-75cd87c688-xlr7b\" (UID: \"b5516312-5bde-4c7d-8910-bf75f2a98812\") " pod="openstack/barbican-keystone-listener-75cd87c688-xlr7b" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.217364 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-n45p2"] Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.227433 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "caa9015d-d530-4caa-8a24-2338d69519a3" (UID: "caa9015d-d530-4caa-8a24-2338d69519a3"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.233528 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-scripts" (OuterVolumeSpecName: "scripts") pod "caa9015d-d530-4caa-8a24-2338d69519a3" (UID: "caa9015d-d530-4caa-8a24-2338d69519a3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.239693 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/caa9015d-d530-4caa-8a24-2338d69519a3-kube-api-access-hqkdh" (OuterVolumeSpecName: "kube-api-access-hqkdh") pod "caa9015d-d530-4caa-8a24-2338d69519a3" (UID: "caa9015d-d530-4caa-8a24-2338d69519a3"). InnerVolumeSpecName "kube-api-access-hqkdh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.241843 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b5516312-5bde-4c7d-8910-bf75f2a98812-config-data-custom\") pod \"barbican-keystone-listener-75cd87c688-xlr7b\" (UID: \"b5516312-5bde-4c7d-8910-bf75f2a98812\") " pod="openstack/barbican-keystone-listener-75cd87c688-xlr7b" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.243484 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5516312-5bde-4c7d-8910-bf75f2a98812-combined-ca-bundle\") pod \"barbican-keystone-listener-75cd87c688-xlr7b\" (UID: \"b5516312-5bde-4c7d-8910-bf75f2a98812\") " pod="openstack/barbican-keystone-listener-75cd87c688-xlr7b" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.261966 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5516312-5bde-4c7d-8910-bf75f2a98812-config-data\") pod \"barbican-keystone-listener-75cd87c688-xlr7b\" (UID: \"b5516312-5bde-4c7d-8910-bf75f2a98812\") " pod="openstack/barbican-keystone-listener-75cd87c688-xlr7b" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.267097 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrxvm\" (UniqueName: \"kubernetes.io/projected/b5516312-5bde-4c7d-8910-bf75f2a98812-kube-api-access-mrxvm\") pod \"barbican-keystone-listener-75cd87c688-xlr7b\" (UID: \"b5516312-5bde-4c7d-8910-bf75f2a98812\") " pod="openstack/barbican-keystone-listener-75cd87c688-xlr7b" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.282565 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "caa9015d-d530-4caa-8a24-2338d69519a3" (UID: "caa9015d-d530-4caa-8a24-2338d69519a3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.302710 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-n45p2\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.302754 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1de91a80-bddc-4f80-bf05-0d1aba161730-config-data\") pod \"barbican-worker-f9b7c48cf-xdj7r\" (UID: \"1de91a80-bddc-4f80-bf05-0d1aba161730\") " pod="openstack/barbican-worker-f9b7c48cf-xdj7r" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.302774 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1de91a80-bddc-4f80-bf05-0d1aba161730-combined-ca-bundle\") pod \"barbican-worker-f9b7c48cf-xdj7r\" (UID: \"1de91a80-bddc-4f80-bf05-0d1aba161730\") " pod="openstack/barbican-worker-f9b7c48cf-xdj7r" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.302794 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-n45p2\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.302823 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8vqw\" (UniqueName: \"kubernetes.io/projected/30909de0-7786-4946-a607-95b9b6e25832-kube-api-access-x8vqw\") pod \"dnsmasq-dns-75c8ddd69c-n45p2\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.302868 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-n45p2\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.302895 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1de91a80-bddc-4f80-bf05-0d1aba161730-config-data-custom\") pod \"barbican-worker-f9b7c48cf-xdj7r\" (UID: \"1de91a80-bddc-4f80-bf05-0d1aba161730\") " pod="openstack/barbican-worker-f9b7c48cf-xdj7r" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.302920 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1de91a80-bddc-4f80-bf05-0d1aba161730-logs\") pod \"barbican-worker-f9b7c48cf-xdj7r\" (UID: \"1de91a80-bddc-4f80-bf05-0d1aba161730\") " pod="openstack/barbican-worker-f9b7c48cf-xdj7r" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.302948 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-n45p2\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " 
pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.302970 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-config\") pod \"dnsmasq-dns-75c8ddd69c-n45p2\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.302990 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w696t\" (UniqueName: \"kubernetes.io/projected/1de91a80-bddc-4f80-bf05-0d1aba161730-kube-api-access-w696t\") pod \"barbican-worker-f9b7c48cf-xdj7r\" (UID: \"1de91a80-bddc-4f80-bf05-0d1aba161730\") " pod="openstack/barbican-worker-f9b7c48cf-xdj7r" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.303101 4631 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.303114 4631 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/caa9015d-d530-4caa-8a24-2338d69519a3-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.303127 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.303139 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.303151 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqkdh\" (UniqueName: \"kubernetes.io/projected/caa9015d-d530-4caa-8a24-2338d69519a3-kube-api-access-hqkdh\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.304311 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-n45p2\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.306400 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-n45p2\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.306685 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1de91a80-bddc-4f80-bf05-0d1aba161730-logs\") pod \"barbican-worker-f9b7c48cf-xdj7r\" (UID: \"1de91a80-bddc-4f80-bf05-0d1aba161730\") " pod="openstack/barbican-worker-f9b7c48cf-xdj7r" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.307195 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-config\") pod 
\"dnsmasq-dns-75c8ddd69c-n45p2\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.311845 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-n45p2\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.312788 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1de91a80-bddc-4f80-bf05-0d1aba161730-combined-ca-bundle\") pod \"barbican-worker-f9b7c48cf-xdj7r\" (UID: \"1de91a80-bddc-4f80-bf05-0d1aba161730\") " pod="openstack/barbican-worker-f9b7c48cf-xdj7r" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.314001 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-n45p2\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.335263 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1de91a80-bddc-4f80-bf05-0d1aba161730-config-data-custom\") pod \"barbican-worker-f9b7c48cf-xdj7r\" (UID: \"1de91a80-bddc-4f80-bf05-0d1aba161730\") " pod="openstack/barbican-worker-f9b7c48cf-xdj7r" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.336308 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1de91a80-bddc-4f80-bf05-0d1aba161730-config-data\") pod \"barbican-worker-f9b7c48cf-xdj7r\" (UID: \"1de91a80-bddc-4f80-bf05-0d1aba161730\") " pod="openstack/barbican-worker-f9b7c48cf-xdj7r" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.342438 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8vqw\" (UniqueName: \"kubernetes.io/projected/30909de0-7786-4946-a607-95b9b6e25832-kube-api-access-x8vqw\") pod \"dnsmasq-dns-75c8ddd69c-n45p2\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.345238 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-config-data" (OuterVolumeSpecName: "config-data") pod "caa9015d-d530-4caa-8a24-2338d69519a3" (UID: "caa9015d-d530-4caa-8a24-2338d69519a3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.350188 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w696t\" (UniqueName: \"kubernetes.io/projected/1de91a80-bddc-4f80-bf05-0d1aba161730-kube-api-access-w696t\") pod \"barbican-worker-f9b7c48cf-xdj7r\" (UID: \"1de91a80-bddc-4f80-bf05-0d1aba161730\") " pod="openstack/barbican-worker-f9b7c48cf-xdj7r" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.407538 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/caa9015d-d530-4caa-8a24-2338d69519a3-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.460881 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-75cd87c688-xlr7b" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.514268 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-598cb64fd8-27j5s"] Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.515735 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.527263 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-49bhb" event={"ID":"caa9015d-d530-4caa-8a24-2338d69519a3","Type":"ContainerDied","Data":"dcc346068c456b64385b4cdeae483a29f60bfe2bc627be94faeaa506c129c931"} Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.527303 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dcc346068c456b64385b4cdeae483a29f60bfe2bc627be94faeaa506c129c931" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.527363 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-49bhb" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.534844 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.535314 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-f9b7c48cf-xdj7r" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.535604 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-598cb64fd8-27j5s"] Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.570058 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc7c63c8-d4b4-49fd-81a7-1720d58d2934","Type":"ContainerDied","Data":"46dcb6125da0d9205e08dcaf5026b594d4c50b7d5bad66e01511087ca9fea077"} Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.570125 4631 scope.go:117] "RemoveContainer" containerID="0ad11945b92fb9a7c2a3e91ceb699610be49d81d237194af65371838484db3b7" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.570146 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.570657 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.615646 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6d6c6b7549-c7hqg" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.675357 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.715006 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-logs\") pod \"barbican-api-598cb64fd8-27j5s\" (UID: \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\") " pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.715085 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7p5x\" (UniqueName: \"kubernetes.io/projected/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-kube-api-access-x7p5x\") pod \"barbican-api-598cb64fd8-27j5s\" (UID: \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\") " pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.715107 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-config-data-custom\") pod \"barbican-api-598cb64fd8-27j5s\" (UID: \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\") " pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.715144 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-config-data\") pod \"barbican-api-598cb64fd8-27j5s\" (UID: \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\") " pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.715234 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-combined-ca-bundle\") pod \"barbican-api-598cb64fd8-27j5s\" (UID: \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\") " pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.726489 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.759484 4631 scope.go:117] "RemoveContainer" containerID="192511984f822b6322fa18239ccfb59efad1f3895c2fcffe5a0781b153a3b284" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.789491 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.795648 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.807303 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.808784 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.816278 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-combined-ca-bundle\") pod \"barbican-api-598cb64fd8-27j5s\" (UID: \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\") " pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.816339 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-logs\") pod \"barbican-api-598cb64fd8-27j5s\" (UID: \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\") " pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.816402 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7p5x\" (UniqueName: \"kubernetes.io/projected/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-kube-api-access-x7p5x\") pod \"barbican-api-598cb64fd8-27j5s\" (UID: \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\") " pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.816426 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-config-data-custom\") pod \"barbican-api-598cb64fd8-27j5s\" (UID: \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\") " pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.816455 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-config-data\") pod \"barbican-api-598cb64fd8-27j5s\" (UID: \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\") " pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.816996 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-logs\") pod \"barbican-api-598cb64fd8-27j5s\" (UID: \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\") " pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.838113 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-config-data-custom\") pod \"barbican-api-598cb64fd8-27j5s\" (UID: \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\") " pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.842843 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-55d98d789b-mxmt5"] Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.846402 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-55d98d789b-mxmt5" podUID="a2a21943-309a-4d97-9e94-5f3248de544c" containerName="neutron-api" 
containerID="cri-o://05c8f34ef3cd9028ef6940053c7f05e99242048b15d5dea70493d4f3817ca6e3" gracePeriod=30 Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.846567 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-55d98d789b-mxmt5" podUID="a2a21943-309a-4d97-9e94-5f3248de544c" containerName="neutron-httpd" containerID="cri-o://c7234416a30da8efe9fd058bca01c7f0b459a50f43ae28b10faa94fc2225254a" gracePeriod=30 Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.852397 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-config-data\") pod \"barbican-api-598cb64fd8-27j5s\" (UID: \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\") " pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.854937 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-combined-ca-bundle\") pod \"barbican-api-598cb64fd8-27j5s\" (UID: \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\") " pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.879539 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.884325 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7p5x\" (UniqueName: \"kubernetes.io/projected/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-kube-api-access-x7p5x\") pod \"barbican-api-598cb64fd8-27j5s\" (UID: \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\") " pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.888004 4631 scope.go:117] "RemoveContainer" containerID="4109a6077f4a790a013d97cbb7037357e84f36997e835618b3d47f18252e6c46" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.924698 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrl2t\" (UniqueName: \"kubernetes.io/projected/ec86a01c-35c1-4302-a746-843df2ed1e3b-kube-api-access-lrl2t\") pod \"ceilometer-0\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " pod="openstack/ceilometer-0" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.924776 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " pod="openstack/ceilometer-0" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.924811 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec86a01c-35c1-4302-a746-843df2ed1e3b-log-httpd\") pod \"ceilometer-0\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " pod="openstack/ceilometer-0" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.924860 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-scripts\") pod \"ceilometer-0\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " pod="openstack/ceilometer-0" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.924887 4631 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " pod="openstack/ceilometer-0" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.924901 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-config-data\") pod \"ceilometer-0\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " pod="openstack/ceilometer-0" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.924922 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec86a01c-35c1-4302-a746-843df2ed1e3b-run-httpd\") pod \"ceilometer-0\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " pod="openstack/ceilometer-0" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.928083 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.930316 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.934439 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-f5vb9" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.934617 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.934812 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 04 17:50:55 crc kubenswrapper[4631]: I1204 17:50:55.934963 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:55.996067 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.035973 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " pod="openstack/ceilometer-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.036067 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec86a01c-35c1-4302-a746-843df2ed1e3b-log-httpd\") pod \"ceilometer-0\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " pod="openstack/ceilometer-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.036193 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-scripts\") pod \"ceilometer-0\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " pod="openstack/ceilometer-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.036242 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " 
pod="openstack/ceilometer-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.036598 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-config-data\") pod \"ceilometer-0\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " pod="openstack/ceilometer-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.036620 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec86a01c-35c1-4302-a746-843df2ed1e3b-run-httpd\") pod \"ceilometer-0\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " pod="openstack/ceilometer-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.036673 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrl2t\" (UniqueName: \"kubernetes.io/projected/ec86a01c-35c1-4302-a746-843df2ed1e3b-kube-api-access-lrl2t\") pod \"ceilometer-0\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " pod="openstack/ceilometer-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.040839 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec86a01c-35c1-4302-a746-843df2ed1e3b-log-httpd\") pod \"ceilometer-0\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " pod="openstack/ceilometer-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.047814 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec86a01c-35c1-4302-a746-843df2ed1e3b-run-httpd\") pod \"ceilometer-0\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " pod="openstack/ceilometer-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.053456 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " pod="openstack/ceilometer-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.055470 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-config-data\") pod \"ceilometer-0\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " pod="openstack/ceilometer-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.057689 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-scripts\") pod \"ceilometer-0\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " pod="openstack/ceilometer-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.076513 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " pod="openstack/ceilometer-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.106070 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrl2t\" (UniqueName: \"kubernetes.io/projected/ec86a01c-35c1-4302-a746-843df2ed1e3b-kube-api-access-lrl2t\") pod \"ceilometer-0\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " pod="openstack/ceilometer-0" Dec 04 17:50:56 crc kubenswrapper[4631]: 
I1204 17:50:56.143237 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7daff208-9b1a-43f7-8fdb-0cd054474578-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " pod="openstack/cinder-scheduler-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.143294 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " pod="openstack/cinder-scheduler-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.143361 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-config-data\") pod \"cinder-scheduler-0\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " pod="openstack/cinder-scheduler-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.143409 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-scripts\") pod \"cinder-scheduler-0\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " pod="openstack/cinder-scheduler-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.143429 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " pod="openstack/cinder-scheduler-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.143450 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwkgc\" (UniqueName: \"kubernetes.io/projected/7daff208-9b1a-43f7-8fdb-0cd054474578-kube-api-access-lwkgc\") pod \"cinder-scheduler-0\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " pod="openstack/cinder-scheduler-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.169760 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.211831 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.218835 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-n45p2"] Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.253106 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7daff208-9b1a-43f7-8fdb-0cd054474578-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " pod="openstack/cinder-scheduler-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.253164 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " pod="openstack/cinder-scheduler-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.253224 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-config-data\") pod \"cinder-scheduler-0\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " pod="openstack/cinder-scheduler-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.253254 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-scripts\") pod \"cinder-scheduler-0\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " pod="openstack/cinder-scheduler-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.253273 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " pod="openstack/cinder-scheduler-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.253298 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwkgc\" (UniqueName: \"kubernetes.io/projected/7daff208-9b1a-43f7-8fdb-0cd054474578-kube-api-access-lwkgc\") pod \"cinder-scheduler-0\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " pod="openstack/cinder-scheduler-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.257612 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-scripts\") pod \"cinder-scheduler-0\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " pod="openstack/cinder-scheduler-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.261147 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7daff208-9b1a-43f7-8fdb-0cd054474578-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " pod="openstack/cinder-scheduler-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.267100 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " pod="openstack/cinder-scheduler-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 
17:50:56.273103 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-config-data\") pod \"cinder-scheduler-0\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " pod="openstack/cinder-scheduler-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.289127 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc7c63c8-d4b4-49fd-81a7-1720d58d2934" path="/var/lib/kubelet/pods/fc7c63c8-d4b4-49fd-81a7-1720d58d2934/volumes" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.312836 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " pod="openstack/cinder-scheduler-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.316671 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwkgc\" (UniqueName: \"kubernetes.io/projected/7daff208-9b1a-43f7-8fdb-0cd054474578-kube-api-access-lwkgc\") pod \"cinder-scheduler-0\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " pod="openstack/cinder-scheduler-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.325292 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-jkhnk"] Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.362290 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.385747 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-jkhnk"] Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.395852 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-jkhnk\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.396800 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-jkhnk\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.396855 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-dns-svc\") pod \"dnsmasq-dns-5784cf869f-jkhnk\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.396903 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6v96b\" (UniqueName: \"kubernetes.io/projected/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-kube-api-access-6v96b\") pod \"dnsmasq-dns-5784cf869f-jkhnk\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.396938 4631 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-config\") pod \"dnsmasq-dns-5784cf869f-jkhnk\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.396982 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-jkhnk\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.498483 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6v96b\" (UniqueName: \"kubernetes.io/projected/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-kube-api-access-6v96b\") pod \"dnsmasq-dns-5784cf869f-jkhnk\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.498555 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-config\") pod \"dnsmasq-dns-5784cf869f-jkhnk\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.498602 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-jkhnk\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.499310 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-jkhnk\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.499388 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-jkhnk\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.499421 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-dns-svc\") pod \"dnsmasq-dns-5784cf869f-jkhnk\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.500395 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-dns-svc\") pod \"dnsmasq-dns-5784cf869f-jkhnk\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.501030 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-jkhnk\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.501562 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.503102 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.506449 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-jkhnk\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.514119 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-config\") pod \"dnsmasq-dns-5784cf869f-jkhnk\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.514241 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.517164 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.530131 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-jkhnk\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.581276 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.601827 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/953bfb33-b2e6-421f-b29f-127c1406800b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " pod="openstack/cinder-api-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.601891 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " pod="openstack/cinder-api-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.601922 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/953bfb33-b2e6-421f-b29f-127c1406800b-logs\") pod \"cinder-api-0\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " pod="openstack/cinder-api-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.601946 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-scripts\") pod \"cinder-api-0\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " pod="openstack/cinder-api-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.601984 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-config-data\") pod \"cinder-api-0\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " pod="openstack/cinder-api-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.602042 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nrj8\" (UniqueName: \"kubernetes.io/projected/953bfb33-b2e6-421f-b29f-127c1406800b-kube-api-access-9nrj8\") pod \"cinder-api-0\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " pod="openstack/cinder-api-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.602072 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-config-data-custom\") pod \"cinder-api-0\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " pod="openstack/cinder-api-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.606732 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6v96b\" (UniqueName: \"kubernetes.io/projected/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-kube-api-access-6v96b\") pod \"dnsmasq-dns-5784cf869f-jkhnk\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.703595 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nrj8\" (UniqueName: \"kubernetes.io/projected/953bfb33-b2e6-421f-b29f-127c1406800b-kube-api-access-9nrj8\") pod \"cinder-api-0\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " pod="openstack/cinder-api-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.703837 4631 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-config-data-custom\") pod \"cinder-api-0\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " pod="openstack/cinder-api-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.703879 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/953bfb33-b2e6-421f-b29f-127c1406800b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " pod="openstack/cinder-api-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.703917 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " pod="openstack/cinder-api-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.703945 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/953bfb33-b2e6-421f-b29f-127c1406800b-logs\") pod \"cinder-api-0\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " pod="openstack/cinder-api-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.703965 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-scripts\") pod \"cinder-api-0\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " pod="openstack/cinder-api-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.704034 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/953bfb33-b2e6-421f-b29f-127c1406800b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " pod="openstack/cinder-api-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.705530 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-config-data\") pod \"cinder-api-0\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " pod="openstack/cinder-api-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.709925 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/953bfb33-b2e6-421f-b29f-127c1406800b-logs\") pod \"cinder-api-0\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " pod="openstack/cinder-api-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.713601 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.714605 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " pod="openstack/cinder-api-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.715051 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-config-data-custom\") pod \"cinder-api-0\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " pod="openstack/cinder-api-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.716066 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-config-data\") pod \"cinder-api-0\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " pod="openstack/cinder-api-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.720812 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nrj8\" (UniqueName: \"kubernetes.io/projected/953bfb33-b2e6-421f-b29f-127c1406800b-kube-api-access-9nrj8\") pod \"cinder-api-0\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " pod="openstack/cinder-api-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.739886 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-scripts\") pod \"cinder-api-0\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " pod="openstack/cinder-api-0" Dec 04 17:50:56 crc kubenswrapper[4631]: I1204 17:50:56.923794 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 04 17:50:57 crc kubenswrapper[4631]: I1204 17:50:57.097254 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-75cd87c688-xlr7b"] Dec 04 17:50:57 crc kubenswrapper[4631]: I1204 17:50:57.122705 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-n45p2"] Dec 04 17:50:57 crc kubenswrapper[4631]: I1204 17:50:57.159191 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-f9b7c48cf-xdj7r"] Dec 04 17:50:57 crc kubenswrapper[4631]: I1204 17:50:57.250863 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-598cb64fd8-27j5s"] Dec 04 17:50:57 crc kubenswrapper[4631]: I1204 17:50:57.304191 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:50:57 crc kubenswrapper[4631]: W1204 17:50:57.322250 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec86a01c_35c1_4302_a746_843df2ed1e3b.slice/crio-3730a193baa4b9803f54cec93f0df32aa11b00dd5cc4bb07142b2d87be8b3300 WatchSource:0}: Error finding container 3730a193baa4b9803f54cec93f0df32aa11b00dd5cc4bb07142b2d87be8b3300: Status 404 returned error can't find the container with id 3730a193baa4b9803f54cec93f0df32aa11b00dd5cc4bb07142b2d87be8b3300 Dec 04 17:50:57 crc kubenswrapper[4631]: I1204 17:50:57.415558 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 04 17:50:57 crc kubenswrapper[4631]: I1204 17:50:57.457922 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-jkhnk"] Dec 04 17:50:57 crc kubenswrapper[4631]: I1204 17:50:57.585857 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 04 17:50:57 crc kubenswrapper[4631]: I1204 17:50:57.674638 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7daff208-9b1a-43f7-8fdb-0cd054474578","Type":"ContainerStarted","Data":"2fb1125bde5008670fdc2ed094ad077339e584b535909b20b09cb72172fc9268"} Dec 04 17:50:57 crc kubenswrapper[4631]: I1204 17:50:57.722663 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" event={"ID":"ffcbb49b-28de-482a-a1c7-6d055f0cbf52","Type":"ContainerStarted","Data":"c94786d9ad8e5e993397a38e6961147cac0448a8ca02c467d9d998f27aebe538"} Dec 04 17:50:57 crc kubenswrapper[4631]: I1204 17:50:57.736508 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"953bfb33-b2e6-421f-b29f-127c1406800b","Type":"ContainerStarted","Data":"d963f491bf34b1ac1611be619b08293558b7031f84b3af25e675598763a3e144"} Dec 04 17:50:57 crc kubenswrapper[4631]: I1204 17:50:57.788739 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" event={"ID":"30909de0-7786-4946-a607-95b9b6e25832","Type":"ContainerStarted","Data":"00fb158a228684760e47cefe289a3daf609615d9d27a7068d769abd468a2f949"} Dec 04 17:50:57 crc kubenswrapper[4631]: I1204 17:50:57.822236 4631 generic.go:334] "Generic (PLEG): container finished" podID="a2a21943-309a-4d97-9e94-5f3248de544c" containerID="c7234416a30da8efe9fd058bca01c7f0b459a50f43ae28b10faa94fc2225254a" exitCode=0 Dec 04 17:50:57 crc kubenswrapper[4631]: I1204 17:50:57.822305 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55d98d789b-mxmt5" 
event={"ID":"a2a21943-309a-4d97-9e94-5f3248de544c","Type":"ContainerDied","Data":"c7234416a30da8efe9fd058bca01c7f0b459a50f43ae28b10faa94fc2225254a"} Dec 04 17:50:57 crc kubenswrapper[4631]: I1204 17:50:57.824578 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-598cb64fd8-27j5s" event={"ID":"28ff9a8c-f5dd-4b98-822a-a405abb26dd3","Type":"ContainerStarted","Data":"6507895a4c80799a49d7889c7137363d44d6d4a8ceff7f7307ec8e3bc8c63a5c"} Dec 04 17:50:57 crc kubenswrapper[4631]: I1204 17:50:57.843851 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-75cd87c688-xlr7b" event={"ID":"b5516312-5bde-4c7d-8910-bf75f2a98812","Type":"ContainerStarted","Data":"dbaf6b90a621a8d9cf31e73943264a9edfa59e1355444275ab54aed2171ec062"} Dec 04 17:50:57 crc kubenswrapper[4631]: I1204 17:50:57.870339 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec86a01c-35c1-4302-a746-843df2ed1e3b","Type":"ContainerStarted","Data":"3730a193baa4b9803f54cec93f0df32aa11b00dd5cc4bb07142b2d87be8b3300"} Dec 04 17:50:57 crc kubenswrapper[4631]: I1204 17:50:57.871164 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-f9b7c48cf-xdj7r" event={"ID":"1de91a80-bddc-4f80-bf05-0d1aba161730","Type":"ContainerStarted","Data":"fb848e7532ca5e9e530a00c662361abd811cec548ed6afc056c268d1cbceba4f"} Dec 04 17:50:58 crc kubenswrapper[4631]: I1204 17:50:58.911024 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-598cb64fd8-27j5s" event={"ID":"28ff9a8c-f5dd-4b98-822a-a405abb26dd3","Type":"ContainerStarted","Data":"a7c532fa3d9123117c778d4b946a33d72af49a4e1f313c71832372c8fb8a5546"} Dec 04 17:50:58 crc kubenswrapper[4631]: I1204 17:50:58.911525 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-598cb64fd8-27j5s" event={"ID":"28ff9a8c-f5dd-4b98-822a-a405abb26dd3","Type":"ContainerStarted","Data":"a8c762adc1022485f5f5d7d601e4e4253e7583933a6fa8ed41bbf327631801ed"} Dec 04 17:50:58 crc kubenswrapper[4631]: I1204 17:50:58.918992 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:50:58 crc kubenswrapper[4631]: I1204 17:50:58.919035 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:50:58 crc kubenswrapper[4631]: I1204 17:50:58.940598 4631 generic.go:334] "Generic (PLEG): container finished" podID="ffcbb49b-28de-482a-a1c7-6d055f0cbf52" containerID="5c7048f2af7e5427bc8d6bf5212421b3cd85be6b8b56dd36e18d7584e6715121" exitCode=0 Dec 04 17:50:58 crc kubenswrapper[4631]: I1204 17:50:58.940665 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" event={"ID":"ffcbb49b-28de-482a-a1c7-6d055f0cbf52","Type":"ContainerDied","Data":"5c7048f2af7e5427bc8d6bf5212421b3cd85be6b8b56dd36e18d7584e6715121"} Dec 04 17:50:58 crc kubenswrapper[4631]: I1204 17:50:58.961010 4631 generic.go:334] "Generic (PLEG): container finished" podID="30909de0-7786-4946-a607-95b9b6e25832" containerID="5c9f44c820d5cc1a48089767ab929c55847a291d0830e8844710a5b604fd2b3b" exitCode=0 Dec 04 17:50:58 crc kubenswrapper[4631]: I1204 17:50:58.961049 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" event={"ID":"30909de0-7786-4946-a607-95b9b6e25832","Type":"ContainerDied","Data":"5c9f44c820d5cc1a48089767ab929c55847a291d0830e8844710a5b604fd2b3b"} Dec 04 17:50:58 
crc kubenswrapper[4631]: I1204 17:50:58.995624 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-598cb64fd8-27j5s" podStartSLOduration=3.9956073180000002 podStartE2EDuration="3.995607318s" podCreationTimestamp="2025-12-04 17:50:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:50:58.960150205 +0000 UTC m=+1388.992392203" watchObservedRunningTime="2025-12-04 17:50:58.995607318 +0000 UTC m=+1389.027849316" Dec 04 17:50:59 crc kubenswrapper[4631]: I1204 17:50:59.534318 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" Dec 04 17:50:59 crc kubenswrapper[4631]: I1204 17:50:59.595919 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-dns-svc\") pod \"30909de0-7786-4946-a607-95b9b6e25832\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " Dec 04 17:50:59 crc kubenswrapper[4631]: I1204 17:50:59.595968 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-ovsdbserver-nb\") pod \"30909de0-7786-4946-a607-95b9b6e25832\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " Dec 04 17:50:59 crc kubenswrapper[4631]: I1204 17:50:59.596001 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-ovsdbserver-sb\") pod \"30909de0-7786-4946-a607-95b9b6e25832\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " Dec 04 17:50:59 crc kubenswrapper[4631]: I1204 17:50:59.596155 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-dns-swift-storage-0\") pod \"30909de0-7786-4946-a607-95b9b6e25832\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " Dec 04 17:50:59 crc kubenswrapper[4631]: I1204 17:50:59.596202 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-config\") pod \"30909de0-7786-4946-a607-95b9b6e25832\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " Dec 04 17:50:59 crc kubenswrapper[4631]: I1204 17:50:59.596225 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8vqw\" (UniqueName: \"kubernetes.io/projected/30909de0-7786-4946-a607-95b9b6e25832-kube-api-access-x8vqw\") pod \"30909de0-7786-4946-a607-95b9b6e25832\" (UID: \"30909de0-7786-4946-a607-95b9b6e25832\") " Dec 04 17:50:59 crc kubenswrapper[4631]: I1204 17:50:59.630704 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30909de0-7786-4946-a607-95b9b6e25832-kube-api-access-x8vqw" (OuterVolumeSpecName: "kube-api-access-x8vqw") pod "30909de0-7786-4946-a607-95b9b6e25832" (UID: "30909de0-7786-4946-a607-95b9b6e25832"). InnerVolumeSpecName "kube-api-access-x8vqw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:50:59 crc kubenswrapper[4631]: I1204 17:50:59.658226 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "30909de0-7786-4946-a607-95b9b6e25832" (UID: "30909de0-7786-4946-a607-95b9b6e25832"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:59 crc kubenswrapper[4631]: I1204 17:50:59.670350 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "30909de0-7786-4946-a607-95b9b6e25832" (UID: "30909de0-7786-4946-a607-95b9b6e25832"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:59 crc kubenswrapper[4631]: I1204 17:50:59.674054 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "30909de0-7786-4946-a607-95b9b6e25832" (UID: "30909de0-7786-4946-a607-95b9b6e25832"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:59 crc kubenswrapper[4631]: I1204 17:50:59.680724 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "30909de0-7786-4946-a607-95b9b6e25832" (UID: "30909de0-7786-4946-a607-95b9b6e25832"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:59 crc kubenswrapper[4631]: I1204 17:50:59.681038 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-config" (OuterVolumeSpecName: "config") pod "30909de0-7786-4946-a607-95b9b6e25832" (UID: "30909de0-7786-4946-a607-95b9b6e25832"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:50:59 crc kubenswrapper[4631]: I1204 17:50:59.699902 4631 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:59 crc kubenswrapper[4631]: I1204 17:50:59.699927 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:59 crc kubenswrapper[4631]: I1204 17:50:59.699938 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:59 crc kubenswrapper[4631]: I1204 17:50:59.699946 4631 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:59 crc kubenswrapper[4631]: I1204 17:50:59.699956 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30909de0-7786-4946-a607-95b9b6e25832-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:50:59 crc kubenswrapper[4631]: I1204 17:50:59.699965 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8vqw\" (UniqueName: \"kubernetes.io/projected/30909de0-7786-4946-a607-95b9b6e25832-kube-api-access-x8vqw\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:00 crc kubenswrapper[4631]: I1204 17:51:00.068270 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec86a01c-35c1-4302-a746-843df2ed1e3b","Type":"ContainerStarted","Data":"3e58cf7e9dab6c6ea45ca31917359198c5987b2531259c32a12dcf51721b16ac"} Dec 04 17:51:00 crc kubenswrapper[4631]: I1204 17:51:00.093280 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" event={"ID":"ffcbb49b-28de-482a-a1c7-6d055f0cbf52","Type":"ContainerStarted","Data":"d719c65490dae9465c3b7c17d44844548bfbac04f60cb4fbf9f84111308b1a94"} Dec 04 17:51:00 crc kubenswrapper[4631]: I1204 17:51:00.094334 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:51:00 crc kubenswrapper[4631]: I1204 17:51:00.096182 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"953bfb33-b2e6-421f-b29f-127c1406800b","Type":"ContainerStarted","Data":"c9fd9c2597ac06ed6eae0687923eaff9a8cb9fdf1673d02f2fe1db070567b28b"} Dec 04 17:51:00 crc kubenswrapper[4631]: I1204 17:51:00.098324 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" Dec 04 17:51:00 crc kubenswrapper[4631]: I1204 17:51:00.098356 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-n45p2" event={"ID":"30909de0-7786-4946-a607-95b9b6e25832","Type":"ContainerDied","Data":"00fb158a228684760e47cefe289a3daf609615d9d27a7068d769abd468a2f949"} Dec 04 17:51:00 crc kubenswrapper[4631]: I1204 17:51:00.098415 4631 scope.go:117] "RemoveContainer" containerID="5c9f44c820d5cc1a48089767ab929c55847a291d0830e8844710a5b604fd2b3b" Dec 04 17:51:00 crc kubenswrapper[4631]: I1204 17:51:00.126484 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" podStartSLOduration=4.126461751 podStartE2EDuration="4.126461751s" podCreationTimestamp="2025-12-04 17:50:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:51:00.111953656 +0000 UTC m=+1390.144195654" watchObservedRunningTime="2025-12-04 17:51:00.126461751 +0000 UTC m=+1390.158703749" Dec 04 17:51:00 crc kubenswrapper[4631]: I1204 17:51:00.192869 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-n45p2"] Dec 04 17:51:00 crc kubenswrapper[4631]: I1204 17:51:00.207849 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-n45p2"] Dec 04 17:51:00 crc kubenswrapper[4631]: I1204 17:51:00.279825 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30909de0-7786-4946-a607-95b9b6e25832" path="/var/lib/kubelet/pods/30909de0-7786-4946-a607-95b9b6e25832/volumes" Dec 04 17:51:00 crc kubenswrapper[4631]: I1204 17:51:00.372185 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 04 17:51:01 crc kubenswrapper[4631]: I1204 17:51:01.127570 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7daff208-9b1a-43f7-8fdb-0cd054474578","Type":"ContainerStarted","Data":"625cb2f1c778066c8fbe1411bc4256a641165bc5fd24fa05d944463a54a90130"} Dec 04 17:51:01 crc kubenswrapper[4631]: I1204 17:51:01.140864 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"953bfb33-b2e6-421f-b29f-127c1406800b","Type":"ContainerStarted","Data":"230820dcad8dfa829965aa50b8c72b5b8a64be364ca1791e1f88afd12d0e9d31"} Dec 04 17:51:01 crc kubenswrapper[4631]: I1204 17:51:01.141031 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="953bfb33-b2e6-421f-b29f-127c1406800b" containerName="cinder-api-log" containerID="cri-o://c9fd9c2597ac06ed6eae0687923eaff9a8cb9fdf1673d02f2fe1db070567b28b" gracePeriod=30 Dec 04 17:51:01 crc kubenswrapper[4631]: I1204 17:51:01.141115 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="953bfb33-b2e6-421f-b29f-127c1406800b" containerName="cinder-api" containerID="cri-o://230820dcad8dfa829965aa50b8c72b5b8a64be364ca1791e1f88afd12d0e9d31" gracePeriod=30 Dec 04 17:51:01 crc kubenswrapper[4631]: I1204 17:51:01.141356 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 04 17:51:01 crc kubenswrapper[4631]: I1204 17:51:01.149821 4631 generic.go:334] "Generic (PLEG): container finished" podID="a2a21943-309a-4d97-9e94-5f3248de544c" containerID="05c8f34ef3cd9028ef6940053c7f05e99242048b15d5dea70493d4f3817ca6e3" 
exitCode=0 Dec 04 17:51:01 crc kubenswrapper[4631]: I1204 17:51:01.149888 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55d98d789b-mxmt5" event={"ID":"a2a21943-309a-4d97-9e94-5f3248de544c","Type":"ContainerDied","Data":"05c8f34ef3cd9028ef6940053c7f05e99242048b15d5dea70493d4f3817ca6e3"} Dec 04 17:51:01 crc kubenswrapper[4631]: E1204 17:51:01.244202 4631 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod953bfb33_b2e6_421f_b29f_127c1406800b.slice/crio-c9fd9c2597ac06ed6eae0687923eaff9a8cb9fdf1673d02f2fe1db070567b28b.scope\": RecentStats: unable to find data in memory cache]" Dec 04 17:51:02 crc kubenswrapper[4631]: I1204 17:51:02.166107 4631 generic.go:334] "Generic (PLEG): container finished" podID="953bfb33-b2e6-421f-b29f-127c1406800b" containerID="c9fd9c2597ac06ed6eae0687923eaff9a8cb9fdf1673d02f2fe1db070567b28b" exitCode=143 Dec 04 17:51:02 crc kubenswrapper[4631]: I1204 17:51:02.166313 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"953bfb33-b2e6-421f-b29f-127c1406800b","Type":"ContainerDied","Data":"c9fd9c2597ac06ed6eae0687923eaff9a8cb9fdf1673d02f2fe1db070567b28b"} Dec 04 17:51:02 crc kubenswrapper[4631]: I1204 17:51:02.750820 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-55d98d789b-mxmt5" Dec 04 17:51:02 crc kubenswrapper[4631]: I1204 17:51:02.784824 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=6.784809635 podStartE2EDuration="6.784809635s" podCreationTimestamp="2025-12-04 17:50:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:51:01.165893934 +0000 UTC m=+1391.198135932" watchObservedRunningTime="2025-12-04 17:51:02.784809635 +0000 UTC m=+1392.817051633" Dec 04 17:51:02 crc kubenswrapper[4631]: I1204 17:51:02.815948 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-httpd-config\") pod \"a2a21943-309a-4d97-9e94-5f3248de544c\" (UID: \"a2a21943-309a-4d97-9e94-5f3248de544c\") " Dec 04 17:51:02 crc kubenswrapper[4631]: I1204 17:51:02.816044 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6sn7p\" (UniqueName: \"kubernetes.io/projected/a2a21943-309a-4d97-9e94-5f3248de544c-kube-api-access-6sn7p\") pod \"a2a21943-309a-4d97-9e94-5f3248de544c\" (UID: \"a2a21943-309a-4d97-9e94-5f3248de544c\") " Dec 04 17:51:02 crc kubenswrapper[4631]: I1204 17:51:02.816124 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-ovndb-tls-certs\") pod \"a2a21943-309a-4d97-9e94-5f3248de544c\" (UID: \"a2a21943-309a-4d97-9e94-5f3248de544c\") " Dec 04 17:51:02 crc kubenswrapper[4631]: I1204 17:51:02.816141 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-config\") pod \"a2a21943-309a-4d97-9e94-5f3248de544c\" (UID: \"a2a21943-309a-4d97-9e94-5f3248de544c\") " Dec 04 17:51:02 crc kubenswrapper[4631]: I1204 17:51:02.816167 4631 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-combined-ca-bundle\") pod \"a2a21943-309a-4d97-9e94-5f3248de544c\" (UID: \"a2a21943-309a-4d97-9e94-5f3248de544c\") " Dec 04 17:51:02 crc kubenswrapper[4631]: I1204 17:51:02.825619 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "a2a21943-309a-4d97-9e94-5f3248de544c" (UID: "a2a21943-309a-4d97-9e94-5f3248de544c"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:51:02 crc kubenswrapper[4631]: I1204 17:51:02.826667 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2a21943-309a-4d97-9e94-5f3248de544c-kube-api-access-6sn7p" (OuterVolumeSpecName: "kube-api-access-6sn7p") pod "a2a21943-309a-4d97-9e94-5f3248de544c" (UID: "a2a21943-309a-4d97-9e94-5f3248de544c"). InnerVolumeSpecName "kube-api-access-6sn7p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:51:02 crc kubenswrapper[4631]: I1204 17:51:02.910249 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a2a21943-309a-4d97-9e94-5f3248de544c" (UID: "a2a21943-309a-4d97-9e94-5f3248de544c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:51:02 crc kubenswrapper[4631]: I1204 17:51:02.918096 4631 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:02 crc kubenswrapper[4631]: I1204 17:51:02.918126 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6sn7p\" (UniqueName: \"kubernetes.io/projected/a2a21943-309a-4d97-9e94-5f3248de544c-kube-api-access-6sn7p\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:02 crc kubenswrapper[4631]: I1204 17:51:02.918137 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:02 crc kubenswrapper[4631]: I1204 17:51:02.960786 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-config" (OuterVolumeSpecName: "config") pod "a2a21943-309a-4d97-9e94-5f3248de544c" (UID: "a2a21943-309a-4d97-9e94-5f3248de544c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:51:02 crc kubenswrapper[4631]: I1204 17:51:02.989494 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "a2a21943-309a-4d97-9e94-5f3248de544c" (UID: "a2a21943-309a-4d97-9e94-5f3248de544c"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:51:03 crc kubenswrapper[4631]: I1204 17:51:03.020141 4631 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:03 crc kubenswrapper[4631]: I1204 17:51:03.020186 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/a2a21943-309a-4d97-9e94-5f3248de544c-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:03 crc kubenswrapper[4631]: I1204 17:51:03.179513 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-75cd87c688-xlr7b" event={"ID":"b5516312-5bde-4c7d-8910-bf75f2a98812","Type":"ContainerStarted","Data":"28c9fd89cac45c4c2f8edf1e54db447bf5492c8be4a3074a889672a67d36535d"} Dec 04 17:51:03 crc kubenswrapper[4631]: I1204 17:51:03.182817 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec86a01c-35c1-4302-a746-843df2ed1e3b","Type":"ContainerStarted","Data":"a932e7f1f91716b14d407320310f64ee2f8686dad7dd821f2c4301e07650ff7b"} Dec 04 17:51:03 crc kubenswrapper[4631]: I1204 17:51:03.185990 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-55d98d789b-mxmt5" Dec 04 17:51:03 crc kubenswrapper[4631]: I1204 17:51:03.185947 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55d98d789b-mxmt5" event={"ID":"a2a21943-309a-4d97-9e94-5f3248de544c","Type":"ContainerDied","Data":"f1e556fba8af9785eabd3962b567e29fed1266d6e4298152e88731e6ec882f75"} Dec 04 17:51:03 crc kubenswrapper[4631]: I1204 17:51:03.186306 4631 scope.go:117] "RemoveContainer" containerID="c7234416a30da8efe9fd058bca01c7f0b459a50f43ae28b10faa94fc2225254a" Dec 04 17:51:03 crc kubenswrapper[4631]: I1204 17:51:03.200835 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-f9b7c48cf-xdj7r" event={"ID":"1de91a80-bddc-4f80-bf05-0d1aba161730","Type":"ContainerStarted","Data":"98540cd005939bed1df1b78dd3258fcc83f32cdf043485788996694fae5420dc"} Dec 04 17:51:03 crc kubenswrapper[4631]: I1204 17:51:03.269174 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-55d98d789b-mxmt5"] Dec 04 17:51:03 crc kubenswrapper[4631]: I1204 17:51:03.278922 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-55d98d789b-mxmt5"] Dec 04 17:51:03 crc kubenswrapper[4631]: I1204 17:51:03.564021 4631 scope.go:117] "RemoveContainer" containerID="05c8f34ef3cd9028ef6940053c7f05e99242048b15d5dea70493d4f3817ca6e3" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.077130 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6866f4d6b8-5wp55"] Dec 04 17:51:04 crc kubenswrapper[4631]: E1204 17:51:04.078211 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30909de0-7786-4946-a607-95b9b6e25832" containerName="init" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.078278 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="30909de0-7786-4946-a607-95b9b6e25832" containerName="init" Dec 04 17:51:04 crc kubenswrapper[4631]: E1204 17:51:04.078361 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2a21943-309a-4d97-9e94-5f3248de544c" containerName="neutron-api" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.078467 4631 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="a2a21943-309a-4d97-9e94-5f3248de544c" containerName="neutron-api" Dec 04 17:51:04 crc kubenswrapper[4631]: E1204 17:51:04.078566 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2a21943-309a-4d97-9e94-5f3248de544c" containerName="neutron-httpd" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.078626 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2a21943-309a-4d97-9e94-5f3248de544c" containerName="neutron-httpd" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.078854 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2a21943-309a-4d97-9e94-5f3248de544c" containerName="neutron-api" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.078920 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="30909de0-7786-4946-a607-95b9b6e25832" containerName="init" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.078988 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2a21943-309a-4d97-9e94-5f3248de544c" containerName="neutron-httpd" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.087509 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.094067 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.094800 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.100503 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6866f4d6b8-5wp55"] Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.140707 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f65b2092-9992-4e4d-be14-6ea85af840a0-logs\") pod \"barbican-api-6866f4d6b8-5wp55\" (UID: \"f65b2092-9992-4e4d-be14-6ea85af840a0\") " pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.140805 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f65b2092-9992-4e4d-be14-6ea85af840a0-public-tls-certs\") pod \"barbican-api-6866f4d6b8-5wp55\" (UID: \"f65b2092-9992-4e4d-be14-6ea85af840a0\") " pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.140829 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f65b2092-9992-4e4d-be14-6ea85af840a0-combined-ca-bundle\") pod \"barbican-api-6866f4d6b8-5wp55\" (UID: \"f65b2092-9992-4e4d-be14-6ea85af840a0\") " pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.140853 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f65b2092-9992-4e4d-be14-6ea85af840a0-config-data-custom\") pod \"barbican-api-6866f4d6b8-5wp55\" (UID: \"f65b2092-9992-4e4d-be14-6ea85af840a0\") " pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.140874 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-6pv27\" (UniqueName: \"kubernetes.io/projected/f65b2092-9992-4e4d-be14-6ea85af840a0-kube-api-access-6pv27\") pod \"barbican-api-6866f4d6b8-5wp55\" (UID: \"f65b2092-9992-4e4d-be14-6ea85af840a0\") " pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.140894 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f65b2092-9992-4e4d-be14-6ea85af840a0-config-data\") pod \"barbican-api-6866f4d6b8-5wp55\" (UID: \"f65b2092-9992-4e4d-be14-6ea85af840a0\") " pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.140915 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f65b2092-9992-4e4d-be14-6ea85af840a0-internal-tls-certs\") pod \"barbican-api-6866f4d6b8-5wp55\" (UID: \"f65b2092-9992-4e4d-be14-6ea85af840a0\") " pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.237674 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-f9b7c48cf-xdj7r" event={"ID":"1de91a80-bddc-4f80-bf05-0d1aba161730","Type":"ContainerStarted","Data":"76be06ee301dbcf93e8a129a8254d5e9802359701b55005a97d115b33af9b836"} Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.244438 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f65b2092-9992-4e4d-be14-6ea85af840a0-public-tls-certs\") pod \"barbican-api-6866f4d6b8-5wp55\" (UID: \"f65b2092-9992-4e4d-be14-6ea85af840a0\") " pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.244619 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f65b2092-9992-4e4d-be14-6ea85af840a0-combined-ca-bundle\") pod \"barbican-api-6866f4d6b8-5wp55\" (UID: \"f65b2092-9992-4e4d-be14-6ea85af840a0\") " pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.244719 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f65b2092-9992-4e4d-be14-6ea85af840a0-config-data-custom\") pod \"barbican-api-6866f4d6b8-5wp55\" (UID: \"f65b2092-9992-4e4d-be14-6ea85af840a0\") " pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.244809 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pv27\" (UniqueName: \"kubernetes.io/projected/f65b2092-9992-4e4d-be14-6ea85af840a0-kube-api-access-6pv27\") pod \"barbican-api-6866f4d6b8-5wp55\" (UID: \"f65b2092-9992-4e4d-be14-6ea85af840a0\") " pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.244931 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f65b2092-9992-4e4d-be14-6ea85af840a0-config-data\") pod \"barbican-api-6866f4d6b8-5wp55\" (UID: \"f65b2092-9992-4e4d-be14-6ea85af840a0\") " pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.245031 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/f65b2092-9992-4e4d-be14-6ea85af840a0-internal-tls-certs\") pod \"barbican-api-6866f4d6b8-5wp55\" (UID: \"f65b2092-9992-4e4d-be14-6ea85af840a0\") " pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.245149 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f65b2092-9992-4e4d-be14-6ea85af840a0-logs\") pod \"barbican-api-6866f4d6b8-5wp55\" (UID: \"f65b2092-9992-4e4d-be14-6ea85af840a0\") " pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.245606 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f65b2092-9992-4e4d-be14-6ea85af840a0-logs\") pod \"barbican-api-6866f4d6b8-5wp55\" (UID: \"f65b2092-9992-4e4d-be14-6ea85af840a0\") " pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.270509 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f65b2092-9992-4e4d-be14-6ea85af840a0-public-tls-certs\") pod \"barbican-api-6866f4d6b8-5wp55\" (UID: \"f65b2092-9992-4e4d-be14-6ea85af840a0\") " pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.270653 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f65b2092-9992-4e4d-be14-6ea85af840a0-internal-tls-certs\") pod \"barbican-api-6866f4d6b8-5wp55\" (UID: \"f65b2092-9992-4e4d-be14-6ea85af840a0\") " pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.271181 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f65b2092-9992-4e4d-be14-6ea85af840a0-config-data\") pod \"barbican-api-6866f4d6b8-5wp55\" (UID: \"f65b2092-9992-4e4d-be14-6ea85af840a0\") " pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.271489 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f65b2092-9992-4e4d-be14-6ea85af840a0-config-data-custom\") pod \"barbican-api-6866f4d6b8-5wp55\" (UID: \"f65b2092-9992-4e4d-be14-6ea85af840a0\") " pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.271984 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2a21943-309a-4d97-9e94-5f3248de544c" path="/var/lib/kubelet/pods/a2a21943-309a-4d97-9e94-5f3248de544c/volumes" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.283115 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f65b2092-9992-4e4d-be14-6ea85af840a0-combined-ca-bundle\") pod \"barbican-api-6866f4d6b8-5wp55\" (UID: \"f65b2092-9992-4e4d-be14-6ea85af840a0\") " pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.286565 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7daff208-9b1a-43f7-8fdb-0cd054474578","Type":"ContainerStarted","Data":"468043442ce7c7bfbbb68619b25d20e3950531ea1ffffe39321d52f7401ad484"} Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.316495 4631 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/barbican-worker-f9b7c48cf-xdj7r" podStartSLOduration=5.477014523 podStartE2EDuration="10.316468953s" podCreationTimestamp="2025-12-04 17:50:54 +0000 UTC" firstStartedPulling="2025-12-04 17:50:57.121617362 +0000 UTC m=+1387.153859360" lastFinishedPulling="2025-12-04 17:51:01.961071792 +0000 UTC m=+1391.993313790" observedRunningTime="2025-12-04 17:51:04.279895929 +0000 UTC m=+1394.312137927" watchObservedRunningTime="2025-12-04 17:51:04.316468953 +0000 UTC m=+1394.348710971" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.322997 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pv27\" (UniqueName: \"kubernetes.io/projected/f65b2092-9992-4e4d-be14-6ea85af840a0-kube-api-access-6pv27\") pod \"barbican-api-6866f4d6b8-5wp55\" (UID: \"f65b2092-9992-4e4d-be14-6ea85af840a0\") " pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.366080 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=8.114346435 podStartE2EDuration="9.36605634s" podCreationTimestamp="2025-12-04 17:50:55 +0000 UTC" firstStartedPulling="2025-12-04 17:50:57.455767825 +0000 UTC m=+1387.488009823" lastFinishedPulling="2025-12-04 17:50:58.70747773 +0000 UTC m=+1388.739719728" observedRunningTime="2025-12-04 17:51:04.333696075 +0000 UTC m=+1394.365938073" watchObservedRunningTime="2025-12-04 17:51:04.36605634 +0000 UTC m=+1394.398298348" Dec 04 17:51:04 crc kubenswrapper[4631]: I1204 17:51:04.439913 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:05 crc kubenswrapper[4631]: I1204 17:51:05.006165 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6866f4d6b8-5wp55"] Dec 04 17:51:05 crc kubenswrapper[4631]: I1204 17:51:05.317708 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-75cd87c688-xlr7b" event={"ID":"b5516312-5bde-4c7d-8910-bf75f2a98812","Type":"ContainerStarted","Data":"954cd79166493691b4730a429d5c1d47d012f8239bdefb755fd7693c7c8dee73"} Dec 04 17:51:05 crc kubenswrapper[4631]: I1204 17:51:05.320810 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec86a01c-35c1-4302-a746-843df2ed1e3b","Type":"ContainerStarted","Data":"677807845b9688381844e5641183e72834b7c6d3b5c96ee2a8cd4779925e5ec5"} Dec 04 17:51:05 crc kubenswrapper[4631]: I1204 17:51:05.323720 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6866f4d6b8-5wp55" event={"ID":"f65b2092-9992-4e4d-be14-6ea85af840a0","Type":"ContainerStarted","Data":"a71ae3800c016ff952991a21fef4b6a1ed7849a99a148311aecf3297ba70e3b8"} Dec 04 17:51:06 crc kubenswrapper[4631]: I1204 17:51:06.336297 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec86a01c-35c1-4302-a746-843df2ed1e3b","Type":"ContainerStarted","Data":"854ad5ed9561f4ead7b07e9545917d2eeec7a88aff1bff5cf96f86d3ec728aff"} Dec 04 17:51:06 crc kubenswrapper[4631]: I1204 17:51:06.338071 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 04 17:51:06 crc kubenswrapper[4631]: I1204 17:51:06.357316 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6866f4d6b8-5wp55" 
event={"ID":"f65b2092-9992-4e4d-be14-6ea85af840a0","Type":"ContainerStarted","Data":"3882cd723df49f5453589f555c9bc9212e85a1032007424f029933ab02f733cf"} Dec 04 17:51:06 crc kubenswrapper[4631]: I1204 17:51:06.357365 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6866f4d6b8-5wp55" event={"ID":"f65b2092-9992-4e4d-be14-6ea85af840a0","Type":"ContainerStarted","Data":"fe2500a08926735cbef79eb1a915b0ae69d7261b5ad774a13f3dbdb5adb14aba"} Dec 04 17:51:06 crc kubenswrapper[4631]: I1204 17:51:06.357487 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:06 crc kubenswrapper[4631]: I1204 17:51:06.357519 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:06 crc kubenswrapper[4631]: I1204 17:51:06.374108 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-75cd87c688-xlr7b" podStartSLOduration=7.537011772 podStartE2EDuration="12.374084303s" podCreationTimestamp="2025-12-04 17:50:54 +0000 UTC" firstStartedPulling="2025-12-04 17:50:57.124008341 +0000 UTC m=+1387.156250339" lastFinishedPulling="2025-12-04 17:51:01.961080872 +0000 UTC m=+1391.993322870" observedRunningTime="2025-12-04 17:51:05.343646667 +0000 UTC m=+1395.375888665" watchObservedRunningTime="2025-12-04 17:51:06.374084303 +0000 UTC m=+1396.406326301" Dec 04 17:51:06 crc kubenswrapper[4631]: I1204 17:51:06.384747 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.049680275 podStartE2EDuration="11.384725537s" podCreationTimestamp="2025-12-04 17:50:55 +0000 UTC" firstStartedPulling="2025-12-04 17:50:57.368215774 +0000 UTC m=+1387.400457772" lastFinishedPulling="2025-12-04 17:51:05.703261036 +0000 UTC m=+1395.735503034" observedRunningTime="2025-12-04 17:51:06.376694027 +0000 UTC m=+1396.408936035" watchObservedRunningTime="2025-12-04 17:51:06.384725537 +0000 UTC m=+1396.416967535" Dec 04 17:51:06 crc kubenswrapper[4631]: I1204 17:51:06.406648 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6866f4d6b8-5wp55" podStartSLOduration=2.406632822 podStartE2EDuration="2.406632822s" podCreationTimestamp="2025-12-04 17:51:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:51:06.403119372 +0000 UTC m=+1396.435361370" watchObservedRunningTime="2025-12-04 17:51:06.406632822 +0000 UTC m=+1396.438874820" Dec 04 17:51:06 crc kubenswrapper[4631]: I1204 17:51:06.484540 4631 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","podc6763071-ba0b-4ef7-9843-9a4c66fe4a6f"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort podc6763071-ba0b-4ef7-9843-9a4c66fe4a6f] : Timed out while waiting for systemd to remove kubepods-besteffort-podc6763071_ba0b_4ef7_9843_9a4c66fe4a6f.slice" Dec 04 17:51:06 crc kubenswrapper[4631]: I1204 17:51:06.582872 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 04 17:51:06 crc kubenswrapper[4631]: I1204 17:51:06.715512 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:51:06 crc kubenswrapper[4631]: I1204 17:51:06.833381 4631 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-m9cnv"] Dec 04 17:51:06 crc kubenswrapper[4631]: I1204 17:51:06.833706 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv" podUID="1a4004ec-e678-4d85-a83a-b9af1bd78865" containerName="dnsmasq-dns" containerID="cri-o://6a8cd481f651c8d73b1b19c26aadca1133fdfc3e0cdc4cc28604f2e00795a23b" gracePeriod=10 Dec 04 17:51:06 crc kubenswrapper[4631]: I1204 17:51:06.918197 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.212645 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-598cb64fd8-27j5s" podUID="28ff9a8c-f5dd-4b98-822a-a405abb26dd3" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.383536 4631 generic.go:334] "Generic (PLEG): container finished" podID="1a4004ec-e678-4d85-a83a-b9af1bd78865" containerID="6a8cd481f651c8d73b1b19c26aadca1133fdfc3e0cdc4cc28604f2e00795a23b" exitCode=0 Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.384432 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv" event={"ID":"1a4004ec-e678-4d85-a83a-b9af1bd78865","Type":"ContainerDied","Data":"6a8cd481f651c8d73b1b19c26aadca1133fdfc3e0cdc4cc28604f2e00795a23b"} Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.470994 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.608542 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv" Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.620555 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-dns-svc\") pod \"1a4004ec-e678-4d85-a83a-b9af1bd78865\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.620632 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-config\") pod \"1a4004ec-e678-4d85-a83a-b9af1bd78865\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.620656 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vs9xb\" (UniqueName: \"kubernetes.io/projected/1a4004ec-e678-4d85-a83a-b9af1bd78865-kube-api-access-vs9xb\") pod \"1a4004ec-e678-4d85-a83a-b9af1bd78865\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.620723 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-dns-swift-storage-0\") pod \"1a4004ec-e678-4d85-a83a-b9af1bd78865\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.620759 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-ovsdbserver-sb\") pod \"1a4004ec-e678-4d85-a83a-b9af1bd78865\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.620789 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-ovsdbserver-nb\") pod \"1a4004ec-e678-4d85-a83a-b9af1bd78865\" (UID: \"1a4004ec-e678-4d85-a83a-b9af1bd78865\") " Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.675669 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a4004ec-e678-4d85-a83a-b9af1bd78865-kube-api-access-vs9xb" (OuterVolumeSpecName: "kube-api-access-vs9xb") pod "1a4004ec-e678-4d85-a83a-b9af1bd78865" (UID: "1a4004ec-e678-4d85-a83a-b9af1bd78865"). InnerVolumeSpecName "kube-api-access-vs9xb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.723449 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vs9xb\" (UniqueName: \"kubernetes.io/projected/1a4004ec-e678-4d85-a83a-b9af1bd78865-kube-api-access-vs9xb\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.780764 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1a4004ec-e678-4d85-a83a-b9af1bd78865" (UID: "1a4004ec-e678-4d85-a83a-b9af1bd78865"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.782591 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1a4004ec-e678-4d85-a83a-b9af1bd78865" (UID: "1a4004ec-e678-4d85-a83a-b9af1bd78865"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.825794 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-config" (OuterVolumeSpecName: "config") pod "1a4004ec-e678-4d85-a83a-b9af1bd78865" (UID: "1a4004ec-e678-4d85-a83a-b9af1bd78865"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.829920 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.829948 4631 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.829958 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.841231 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1a4004ec-e678-4d85-a83a-b9af1bd78865" (UID: "1a4004ec-e678-4d85-a83a-b9af1bd78865"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.843805 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1a4004ec-e678-4d85-a83a-b9af1bd78865" (UID: "1a4004ec-e678-4d85-a83a-b9af1bd78865"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.885581 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7b99dd8d64-9nrvl" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.885970 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.886667 4631 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"c0232110e98619eba87d73ad4f17b16ad869ae5f8c81c4f0cb9721c3b739dcde"} pod="openstack/horizon-7b99dd8d64-9nrvl" containerMessage="Container horizon failed startup probe, will be restarted" Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.886710 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7b99dd8d64-9nrvl" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerName="horizon" containerID="cri-o://c0232110e98619eba87d73ad4f17b16ad869ae5f8c81c4f0cb9721c3b739dcde" gracePeriod=30 Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.941779 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:07 crc kubenswrapper[4631]: I1204 17:51:07.941829 4631 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a4004ec-e678-4d85-a83a-b9af1bd78865-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:08 crc kubenswrapper[4631]: I1204 17:51:08.005184 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-77d5fd455b-8kwkp" podUID="78aafb4d-470c-477d-bfe6-5b7a29b79fc0" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 04 17:51:08 crc kubenswrapper[4631]: I1204 17:51:08.005277 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:51:08 crc kubenswrapper[4631]: I1204 17:51:08.006028 4631 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"b06b4495ac4d64fb90ce71822defbadb0ac8243d3559e3457c5c1f6e4549434b"} pod="openstack/horizon-77d5fd455b-8kwkp" containerMessage="Container horizon failed startup probe, will be restarted" Dec 04 17:51:08 crc kubenswrapper[4631]: I1204 17:51:08.006068 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-77d5fd455b-8kwkp" podUID="78aafb4d-470c-477d-bfe6-5b7a29b79fc0" containerName="horizon" containerID="cri-o://b06b4495ac4d64fb90ce71822defbadb0ac8243d3559e3457c5c1f6e4549434b" gracePeriod=30 Dec 04 17:51:08 crc kubenswrapper[4631]: I1204 17:51:08.395494 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv" Dec 04 17:51:08 crc kubenswrapper[4631]: I1204 17:51:08.396002 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-m9cnv" event={"ID":"1a4004ec-e678-4d85-a83a-b9af1bd78865","Type":"ContainerDied","Data":"8211e37e432266ff3cd58a8cade8e3d05102733d4384577ec1e408fc0ad17865"} Dec 04 17:51:08 crc kubenswrapper[4631]: I1204 17:51:08.396036 4631 scope.go:117] "RemoveContainer" containerID="6a8cd481f651c8d73b1b19c26aadca1133fdfc3e0cdc4cc28604f2e00795a23b" Dec 04 17:51:08 crc kubenswrapper[4631]: I1204 17:51:08.396218 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="7daff208-9b1a-43f7-8fdb-0cd054474578" containerName="cinder-scheduler" containerID="cri-o://625cb2f1c778066c8fbe1411bc4256a641165bc5fd24fa05d944463a54a90130" gracePeriod=30 Dec 04 17:51:08 crc kubenswrapper[4631]: I1204 17:51:08.397060 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="7daff208-9b1a-43f7-8fdb-0cd054474578" containerName="probe" containerID="cri-o://468043442ce7c7bfbbb68619b25d20e3950531ea1ffffe39321d52f7401ad484" gracePeriod=30 Dec 04 17:51:08 crc kubenswrapper[4631]: I1204 17:51:08.420530 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-m9cnv"] Dec 04 17:51:08 crc kubenswrapper[4631]: I1204 17:51:08.434071 4631 scope.go:117] "RemoveContainer" containerID="c1d51b59f3c0306470f800b58ca7c3e45cdbf32e011d9872e400a0c20ee84974" Dec 04 17:51:08 crc kubenswrapper[4631]: I1204 17:51:08.437674 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-m9cnv"] Dec 04 17:51:09 crc kubenswrapper[4631]: I1204 17:51:09.405283 4631 generic.go:334] "Generic (PLEG): container finished" podID="7daff208-9b1a-43f7-8fdb-0cd054474578" containerID="468043442ce7c7bfbbb68619b25d20e3950531ea1ffffe39321d52f7401ad484" exitCode=0 Dec 04 17:51:09 crc kubenswrapper[4631]: I1204 17:51:09.405553 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7daff208-9b1a-43f7-8fdb-0cd054474578","Type":"ContainerDied","Data":"468043442ce7c7bfbbb68619b25d20e3950531ea1ffffe39321d52f7401ad484"} Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.167183 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.178855 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-combined-ca-bundle\") pod \"7daff208-9b1a-43f7-8fdb-0cd054474578\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.179055 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lwkgc\" (UniqueName: \"kubernetes.io/projected/7daff208-9b1a-43f7-8fdb-0cd054474578-kube-api-access-lwkgc\") pod \"7daff208-9b1a-43f7-8fdb-0cd054474578\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.179100 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-config-data\") pod \"7daff208-9b1a-43f7-8fdb-0cd054474578\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.179163 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-config-data-custom\") pod \"7daff208-9b1a-43f7-8fdb-0cd054474578\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.179259 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-scripts\") pod \"7daff208-9b1a-43f7-8fdb-0cd054474578\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.179290 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7daff208-9b1a-43f7-8fdb-0cd054474578-etc-machine-id\") pod \"7daff208-9b1a-43f7-8fdb-0cd054474578\" (UID: \"7daff208-9b1a-43f7-8fdb-0cd054474578\") " Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.179419 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7daff208-9b1a-43f7-8fdb-0cd054474578-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7daff208-9b1a-43f7-8fdb-0cd054474578" (UID: "7daff208-9b1a-43f7-8fdb-0cd054474578"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.179740 4631 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7daff208-9b1a-43f7-8fdb-0cd054474578-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.186599 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7daff208-9b1a-43f7-8fdb-0cd054474578-kube-api-access-lwkgc" (OuterVolumeSpecName: "kube-api-access-lwkgc") pod "7daff208-9b1a-43f7-8fdb-0cd054474578" (UID: "7daff208-9b1a-43f7-8fdb-0cd054474578"). InnerVolumeSpecName "kube-api-access-lwkgc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.190466 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7daff208-9b1a-43f7-8fdb-0cd054474578" (UID: "7daff208-9b1a-43f7-8fdb-0cd054474578"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.193529 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-scripts" (OuterVolumeSpecName: "scripts") pod "7daff208-9b1a-43f7-8fdb-0cd054474578" (UID: "7daff208-9b1a-43f7-8fdb-0cd054474578"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.282249 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a4004ec-e678-4d85-a83a-b9af1bd78865" path="/var/lib/kubelet/pods/1a4004ec-e678-4d85-a83a-b9af1bd78865/volumes" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.283901 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.283918 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lwkgc\" (UniqueName: \"kubernetes.io/projected/7daff208-9b1a-43f7-8fdb-0cd054474578-kube-api-access-lwkgc\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.283929 4631 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.296600 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7daff208-9b1a-43f7-8fdb-0cd054474578" (UID: "7daff208-9b1a-43f7-8fdb-0cd054474578"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.366361 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-config-data" (OuterVolumeSpecName: "config-data") pod "7daff208-9b1a-43f7-8fdb-0cd054474578" (UID: "7daff208-9b1a-43f7-8fdb-0cd054474578"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.386804 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.386833 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7daff208-9b1a-43f7-8fdb-0cd054474578-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.405405 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-776f95766d-5qctj" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.436931 4631 generic.go:334] "Generic (PLEG): container finished" podID="7daff208-9b1a-43f7-8fdb-0cd054474578" containerID="625cb2f1c778066c8fbe1411bc4256a641165bc5fd24fa05d944463a54a90130" exitCode=0 Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.437000 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7daff208-9b1a-43f7-8fdb-0cd054474578","Type":"ContainerDied","Data":"625cb2f1c778066c8fbe1411bc4256a641165bc5fd24fa05d944463a54a90130"} Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.437031 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7daff208-9b1a-43f7-8fdb-0cd054474578","Type":"ContainerDied","Data":"2fb1125bde5008670fdc2ed094ad077339e584b535909b20b09cb72172fc9268"} Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.437068 4631 scope.go:117] "RemoveContainer" containerID="468043442ce7c7bfbbb68619b25d20e3950531ea1ffffe39321d52f7401ad484" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.437226 4631 util.go:48] "No ready sandbox for pod can be found. 
Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.467763 4631 scope.go:117] "RemoveContainer" containerID="625cb2f1c778066c8fbe1411bc4256a641165bc5fd24fa05d944463a54a90130"
Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.483018 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.499041 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.515261 4631 scope.go:117] "RemoveContainer" containerID="468043442ce7c7bfbbb68619b25d20e3950531ea1ffffe39321d52f7401ad484"
Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.515415 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 04 17:51:10 crc kubenswrapper[4631]: E1204 17:51:10.515790 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a4004ec-e678-4d85-a83a-b9af1bd78865" containerName="init"
Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.515810 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a4004ec-e678-4d85-a83a-b9af1bd78865" containerName="init"
Dec 04 17:51:10 crc kubenswrapper[4631]: E1204 17:51:10.515838 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a4004ec-e678-4d85-a83a-b9af1bd78865" containerName="dnsmasq-dns"
Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.515847 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a4004ec-e678-4d85-a83a-b9af1bd78865" containerName="dnsmasq-dns"
Dec 04 17:51:10 crc kubenswrapper[4631]: E1204 17:51:10.515871 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7daff208-9b1a-43f7-8fdb-0cd054474578" containerName="probe"
Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.515878 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="7daff208-9b1a-43f7-8fdb-0cd054474578" containerName="probe"
Dec 04 17:51:10 crc kubenswrapper[4631]: E1204 17:51:10.515894 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7daff208-9b1a-43f7-8fdb-0cd054474578" containerName="cinder-scheduler"
Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.515903 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="7daff208-9b1a-43f7-8fdb-0cd054474578" containerName="cinder-scheduler"
Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.516139 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="7daff208-9b1a-43f7-8fdb-0cd054474578" containerName="probe"
Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.516161 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="7daff208-9b1a-43f7-8fdb-0cd054474578" containerName="cinder-scheduler"
Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.516172 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a4004ec-e678-4d85-a83a-b9af1bd78865" containerName="dnsmasq-dns"
Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.517233 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 04 17:51:10 crc kubenswrapper[4631]: E1204 17:51:10.522603 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"468043442ce7c7bfbbb68619b25d20e3950531ea1ffffe39321d52f7401ad484\": container with ID starting with 468043442ce7c7bfbbb68619b25d20e3950531ea1ffffe39321d52f7401ad484 not found: ID does not exist" containerID="468043442ce7c7bfbbb68619b25d20e3950531ea1ffffe39321d52f7401ad484" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.522640 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"468043442ce7c7bfbbb68619b25d20e3950531ea1ffffe39321d52f7401ad484"} err="failed to get container status \"468043442ce7c7bfbbb68619b25d20e3950531ea1ffffe39321d52f7401ad484\": rpc error: code = NotFound desc = could not find container \"468043442ce7c7bfbbb68619b25d20e3950531ea1ffffe39321d52f7401ad484\": container with ID starting with 468043442ce7c7bfbbb68619b25d20e3950531ea1ffffe39321d52f7401ad484 not found: ID does not exist" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.522666 4631 scope.go:117] "RemoveContainer" containerID="625cb2f1c778066c8fbe1411bc4256a641165bc5fd24fa05d944463a54a90130" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.523105 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 04 17:51:10 crc kubenswrapper[4631]: E1204 17:51:10.524605 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"625cb2f1c778066c8fbe1411bc4256a641165bc5fd24fa05d944463a54a90130\": container with ID starting with 625cb2f1c778066c8fbe1411bc4256a641165bc5fd24fa05d944463a54a90130 not found: ID does not exist" containerID="625cb2f1c778066c8fbe1411bc4256a641165bc5fd24fa05d944463a54a90130" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.524635 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"625cb2f1c778066c8fbe1411bc4256a641165bc5fd24fa05d944463a54a90130"} err="failed to get container status \"625cb2f1c778066c8fbe1411bc4256a641165bc5fd24fa05d944463a54a90130\": rpc error: code = NotFound desc = could not find container \"625cb2f1c778066c8fbe1411bc4256a641165bc5fd24fa05d944463a54a90130\": container with ID starting with 625cb2f1c778066c8fbe1411bc4256a641165bc5fd24fa05d944463a54a90130 not found: ID does not exist" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.548303 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.588279 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f5599bd7-2ca5-4217-a0bd-785b3fb612b7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"f5599bd7-2ca5-4217-a0bd-785b3fb612b7\") " pod="openstack/cinder-scheduler-0" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.588325 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5599bd7-2ca5-4217-a0bd-785b3fb612b7-config-data\") pod \"cinder-scheduler-0\" (UID: \"f5599bd7-2ca5-4217-a0bd-785b3fb612b7\") " pod="openstack/cinder-scheduler-0" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.588403 4631 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77gzm\" (UniqueName: \"kubernetes.io/projected/f5599bd7-2ca5-4217-a0bd-785b3fb612b7-kube-api-access-77gzm\") pod \"cinder-scheduler-0\" (UID: \"f5599bd7-2ca5-4217-a0bd-785b3fb612b7\") " pod="openstack/cinder-scheduler-0" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.588422 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5599bd7-2ca5-4217-a0bd-785b3fb612b7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"f5599bd7-2ca5-4217-a0bd-785b3fb612b7\") " pod="openstack/cinder-scheduler-0" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.588502 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f5599bd7-2ca5-4217-a0bd-785b3fb612b7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"f5599bd7-2ca5-4217-a0bd-785b3fb612b7\") " pod="openstack/cinder-scheduler-0" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.588540 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5599bd7-2ca5-4217-a0bd-785b3fb612b7-scripts\") pod \"cinder-scheduler-0\" (UID: \"f5599bd7-2ca5-4217-a0bd-785b3fb612b7\") " pod="openstack/cinder-scheduler-0" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.595848 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.689341 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f5599bd7-2ca5-4217-a0bd-785b3fb612b7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"f5599bd7-2ca5-4217-a0bd-785b3fb612b7\") " pod="openstack/cinder-scheduler-0" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.689398 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5599bd7-2ca5-4217-a0bd-785b3fb612b7-config-data\") pod \"cinder-scheduler-0\" (UID: \"f5599bd7-2ca5-4217-a0bd-785b3fb612b7\") " pod="openstack/cinder-scheduler-0" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.689491 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f5599bd7-2ca5-4217-a0bd-785b3fb612b7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"f5599bd7-2ca5-4217-a0bd-785b3fb612b7\") " pod="openstack/cinder-scheduler-0" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.689518 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77gzm\" (UniqueName: \"kubernetes.io/projected/f5599bd7-2ca5-4217-a0bd-785b3fb612b7-kube-api-access-77gzm\") pod \"cinder-scheduler-0\" (UID: \"f5599bd7-2ca5-4217-a0bd-785b3fb612b7\") " pod="openstack/cinder-scheduler-0" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.690608 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5599bd7-2ca5-4217-a0bd-785b3fb612b7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"f5599bd7-2ca5-4217-a0bd-785b3fb612b7\") " pod="openstack/cinder-scheduler-0" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 
17:51:10.690764 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f5599bd7-2ca5-4217-a0bd-785b3fb612b7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"f5599bd7-2ca5-4217-a0bd-785b3fb612b7\") " pod="openstack/cinder-scheduler-0" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.690798 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5599bd7-2ca5-4217-a0bd-785b3fb612b7-scripts\") pod \"cinder-scheduler-0\" (UID: \"f5599bd7-2ca5-4217-a0bd-785b3fb612b7\") " pod="openstack/cinder-scheduler-0" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.694895 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5599bd7-2ca5-4217-a0bd-785b3fb612b7-config-data\") pod \"cinder-scheduler-0\" (UID: \"f5599bd7-2ca5-4217-a0bd-785b3fb612b7\") " pod="openstack/cinder-scheduler-0" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.696905 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5599bd7-2ca5-4217-a0bd-785b3fb612b7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"f5599bd7-2ca5-4217-a0bd-785b3fb612b7\") " pod="openstack/cinder-scheduler-0" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.697803 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f5599bd7-2ca5-4217-a0bd-785b3fb612b7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"f5599bd7-2ca5-4217-a0bd-785b3fb612b7\") " pod="openstack/cinder-scheduler-0" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.697921 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5599bd7-2ca5-4217-a0bd-785b3fb612b7-scripts\") pod \"cinder-scheduler-0\" (UID: \"f5599bd7-2ca5-4217-a0bd-785b3fb612b7\") " pod="openstack/cinder-scheduler-0" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.712098 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77gzm\" (UniqueName: \"kubernetes.io/projected/f5599bd7-2ca5-4217-a0bd-785b3fb612b7-kube-api-access-77gzm\") pod \"cinder-scheduler-0\" (UID: \"f5599bd7-2ca5-4217-a0bd-785b3fb612b7\") " pod="openstack/cinder-scheduler-0" Dec 04 17:51:10 crc kubenswrapper[4631]: I1204 17:51:10.843693 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 04 17:51:11 crc kubenswrapper[4631]: I1204 17:51:11.212715 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-598cb64fd8-27j5s" podUID="28ff9a8c-f5dd-4b98-822a-a405abb26dd3" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 04 17:51:11 crc kubenswrapper[4631]: I1204 17:51:11.491524 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 04 17:51:11 crc kubenswrapper[4631]: I1204 17:51:11.725827 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-776f95766d-5qctj"
Dec 04 17:51:11 crc kubenswrapper[4631]: I1204 17:51:11.968685 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="953bfb33-b2e6-421f-b29f-127c1406800b" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.163:8776/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 04 17:51:12 crc kubenswrapper[4631]: I1204 17:51:12.214283 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-598cb64fd8-27j5s" podUID="28ff9a8c-f5dd-4b98-822a-a405abb26dd3" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 04 17:51:12 crc kubenswrapper[4631]: I1204 17:51:12.256617 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7daff208-9b1a-43f7-8fdb-0cd054474578" path="/var/lib/kubelet/pods/7daff208-9b1a-43f7-8fdb-0cd054474578/volumes"
Dec 04 17:51:12 crc kubenswrapper[4631]: I1204 17:51:12.478745 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f5599bd7-2ca5-4217-a0bd-785b3fb612b7","Type":"ContainerStarted","Data":"a7bb9a863cf8a08baa88d6d712d0ff4b2096af54b42c838f771c35fbc5df5bd5"}
Dec 04 17:51:12 crc kubenswrapper[4631]: I1204 17:51:12.482994 4631 generic.go:334] "Generic (PLEG): container finished" podID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerID="c0232110e98619eba87d73ad4f17b16ad869ae5f8c81c4f0cb9721c3b739dcde" exitCode=0
Dec 04 17:51:12 crc kubenswrapper[4631]: I1204 17:51:12.483044 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b99dd8d64-9nrvl" event={"ID":"a675d52a-03e9-46e8-8b51-4e7f378179cf","Type":"ContainerDied","Data":"c0232110e98619eba87d73ad4f17b16ad869ae5f8c81c4f0cb9721c3b739dcde"}
Dec 04 17:51:13 crc kubenswrapper[4631]: I1204 17:51:13.004747 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-589bf6fb8-62vft"
Dec 04 17:51:13 crc kubenswrapper[4631]: I1204 17:51:13.527056 4631 generic.go:334] "Generic (PLEG): container finished" podID="78aafb4d-470c-477d-bfe6-5b7a29b79fc0" containerID="b06b4495ac4d64fb90ce71822defbadb0ac8243d3559e3457c5c1f6e4549434b" exitCode=0
Dec 04 17:51:13 crc kubenswrapper[4631]: I1204 17:51:13.527192 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-77d5fd455b-8kwkp" event={"ID":"78aafb4d-470c-477d-bfe6-5b7a29b79fc0","Type":"ContainerDied","Data":"b06b4495ac4d64fb90ce71822defbadb0ac8243d3559e3457c5c1f6e4549434b"}
Dec 04 17:51:13 crc kubenswrapper[4631]: I1204 17:51:13.527224 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-77d5fd455b-8kwkp" event={"ID":"78aafb4d-470c-477d-bfe6-5b7a29b79fc0","Type":"ContainerStarted","Data":"ec315d419e0896783310c02e5963616349f6c59f77be147f4ac44a81f9f78b34"}
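The "Probe failed" entries above all carry "Client.Timeout exceeded while awaiting headers": the probing HTTP client gave up before the healthcheck endpoint produced a response. A stdlib-only Go sketch of what such an HTTP probe boils down to; the URL is the cinder-api healthcheck taken from the log, and the 1-second timeout is an assumed stand-in for the probe's configured timeout:

```go
package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	// When the server accepts the connection but cannot answer within the
	// timeout, net/http returns exactly the error quoted in the log:
	// "context deadline exceeded (Client.Timeout exceeded while awaiting headers)".
	client := &http.Client{Timeout: 1 * time.Second}

	resp, err := client.Get("http://10.217.0.163:8776/healthcheck")
	if err != nil {
		fmt.Println("Probe failed:", err) // probeResult="failure"
		return
	}
	defer resp.Body.Close()
	// Kubelet HTTP probes treat 2xx/3xx status codes as success.
	fmt.Println("Probe result:", resp.Status)
}
```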
pod="openstack/horizon-77d5fd455b-8kwkp" event={"ID":"78aafb4d-470c-477d-bfe6-5b7a29b79fc0","Type":"ContainerStarted","Data":"ec315d419e0896783310c02e5963616349f6c59f77be147f4ac44a81f9f78b34"} Dec 04 17:51:13 crc kubenswrapper[4631]: I1204 17:51:13.538784 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b99dd8d64-9nrvl" event={"ID":"a675d52a-03e9-46e8-8b51-4e7f378179cf","Type":"ContainerStarted","Data":"7c15bbd3b5b3f4435f5df4d6fad19a863e4c5a1e382a6d66758122b5970fd161"} Dec 04 17:51:13 crc kubenswrapper[4631]: I1204 17:51:13.554629 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f5599bd7-2ca5-4217-a0bd-785b3fb612b7","Type":"ContainerStarted","Data":"e62a2ae7574fbcf2f203f9ad622688518c4e3b9f46c1f24272891bd8fe8ed3ae"} Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.379820 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.381229 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.383652 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.383808 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.384065 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-2cd4q" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.401176 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.523783 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-combined-ca-bundle\") pod \"openstackclient\" (UID: \"7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130\") " pod="openstack/openstackclient" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.523870 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-openstack-config-secret\") pod \"openstackclient\" (UID: \"7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130\") " pod="openstack/openstackclient" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.524061 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qdgv\" (UniqueName: \"kubernetes.io/projected/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-kube-api-access-9qdgv\") pod \"openstackclient\" (UID: \"7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130\") " pod="openstack/openstackclient" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.524206 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-openstack-config\") pod \"openstackclient\" (UID: \"7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130\") " pod="openstack/openstackclient" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.588646 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" 
event={"ID":"f5599bd7-2ca5-4217-a0bd-785b3fb612b7","Type":"ContainerStarted","Data":"fc49935c7cfea1895711c57523b67881d29a198d5b01631c0cc928b11bd99f52"} Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.630277 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-combined-ca-bundle\") pod \"openstackclient\" (UID: \"7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130\") " pod="openstack/openstackclient" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.630392 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-openstack-config-secret\") pod \"openstackclient\" (UID: \"7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130\") " pod="openstack/openstackclient" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.630436 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qdgv\" (UniqueName: \"kubernetes.io/projected/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-kube-api-access-9qdgv\") pod \"openstackclient\" (UID: \"7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130\") " pod="openstack/openstackclient" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.630475 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-openstack-config\") pod \"openstackclient\" (UID: \"7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130\") " pod="openstack/openstackclient" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.632651 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-openstack-config\") pod \"openstackclient\" (UID: \"7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130\") " pod="openstack/openstackclient" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.642291 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-combined-ca-bundle\") pod \"openstackclient\" (UID: \"7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130\") " pod="openstack/openstackclient" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.649219 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-openstack-config-secret\") pod \"openstackclient\" (UID: \"7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130\") " pod="openstack/openstackclient" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.652773 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qdgv\" (UniqueName: \"kubernetes.io/projected/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-kube-api-access-9qdgv\") pod \"openstackclient\" (UID: \"7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130\") " pod="openstack/openstackclient" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.673594 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.673576287 podStartE2EDuration="4.673576287s" podCreationTimestamp="2025-12-04 17:51:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:51:14.617946668 +0000 
UTC m=+1404.650188666" watchObservedRunningTime="2025-12-04 17:51:14.673576287 +0000 UTC m=+1404.705818285" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.688606 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.689343 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.703000 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.771900 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-598cb64fd8-27j5s" podUID="28ff9a8c-f5dd-4b98-822a-a405abb26dd3" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.791275 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.792501 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.830224 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.935297 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4csvd\" (UniqueName: \"kubernetes.io/projected/c65652e6-704f-4f88-9b9d-435868d33e0e-kube-api-access-4csvd\") pod \"openstackclient\" (UID: \"c65652e6-704f-4f88-9b9d-435868d33e0e\") " pod="openstack/openstackclient" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.935342 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/c65652e6-704f-4f88-9b9d-435868d33e0e-openstack-config\") pod \"openstackclient\" (UID: \"c65652e6-704f-4f88-9b9d-435868d33e0e\") " pod="openstack/openstackclient" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.935426 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/c65652e6-704f-4f88-9b9d-435868d33e0e-openstack-config-secret\") pod \"openstackclient\" (UID: \"c65652e6-704f-4f88-9b9d-435868d33e0e\") " pod="openstack/openstackclient" Dec 04 17:51:14 crc kubenswrapper[4631]: I1204 17:51:14.935487 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c65652e6-704f-4f88-9b9d-435868d33e0e-combined-ca-bundle\") pod \"openstackclient\" (UID: \"c65652e6-704f-4f88-9b9d-435868d33e0e\") " pod="openstack/openstackclient" Dec 04 17:51:15 crc kubenswrapper[4631]: E1204 17:51:15.001019 4631 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 04 17:51:15 crc kubenswrapper[4631]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130_0(2c8082722ca340bfbf26e27495fe25cc220977e2f924dccd7413edba202cd5fe): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI 
request failed with status 400: 'ContainerID:"2c8082722ca340bfbf26e27495fe25cc220977e2f924dccd7413edba202cd5fe" Netns:"/var/run/netns/28161409-8ac6-4e02-803e-a75a7f4dec1c" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=2c8082722ca340bfbf26e27495fe25cc220977e2f924dccd7413edba202cd5fe;K8S_POD_UID=7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130]: expected pod UID "7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130" but got "c65652e6-704f-4f88-9b9d-435868d33e0e" from Kube API Dec 04 17:51:15 crc kubenswrapper[4631]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 04 17:51:15 crc kubenswrapper[4631]: > Dec 04 17:51:15 crc kubenswrapper[4631]: E1204 17:51:15.001085 4631 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Dec 04 17:51:15 crc kubenswrapper[4631]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130_0(2c8082722ca340bfbf26e27495fe25cc220977e2f924dccd7413edba202cd5fe): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"2c8082722ca340bfbf26e27495fe25cc220977e2f924dccd7413edba202cd5fe" Netns:"/var/run/netns/28161409-8ac6-4e02-803e-a75a7f4dec1c" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=2c8082722ca340bfbf26e27495fe25cc220977e2f924dccd7413edba202cd5fe;K8S_POD_UID=7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130]: expected pod UID "7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130" but got "c65652e6-704f-4f88-9b9d-435868d33e0e" from Kube API Dec 04 17:51:15 crc kubenswrapper[4631]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 04 17:51:15 crc kubenswrapper[4631]: > pod="openstack/openstackclient" Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.037305 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c65652e6-704f-4f88-9b9d-435868d33e0e-combined-ca-bundle\") pod \"openstackclient\" (UID: \"c65652e6-704f-4f88-9b9d-435868d33e0e\") " pod="openstack/openstackclient" Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.037468 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4csvd\" (UniqueName: \"kubernetes.io/projected/c65652e6-704f-4f88-9b9d-435868d33e0e-kube-api-access-4csvd\") pod \"openstackclient\" (UID: 
\"c65652e6-704f-4f88-9b9d-435868d33e0e\") " pod="openstack/openstackclient" Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.037495 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/c65652e6-704f-4f88-9b9d-435868d33e0e-openstack-config\") pod \"openstackclient\" (UID: \"c65652e6-704f-4f88-9b9d-435868d33e0e\") " pod="openstack/openstackclient" Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.037536 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/c65652e6-704f-4f88-9b9d-435868d33e0e-openstack-config-secret\") pod \"openstackclient\" (UID: \"c65652e6-704f-4f88-9b9d-435868d33e0e\") " pod="openstack/openstackclient" Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.038844 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/c65652e6-704f-4f88-9b9d-435868d33e0e-openstack-config\") pod \"openstackclient\" (UID: \"c65652e6-704f-4f88-9b9d-435868d33e0e\") " pod="openstack/openstackclient" Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.042043 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/c65652e6-704f-4f88-9b9d-435868d33e0e-openstack-config-secret\") pod \"openstackclient\" (UID: \"c65652e6-704f-4f88-9b9d-435868d33e0e\") " pod="openstack/openstackclient" Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.045875 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c65652e6-704f-4f88-9b9d-435868d33e0e-combined-ca-bundle\") pod \"openstackclient\" (UID: \"c65652e6-704f-4f88-9b9d-435868d33e0e\") " pod="openstack/openstackclient" Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.064001 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4csvd\" (UniqueName: \"kubernetes.io/projected/c65652e6-704f-4f88-9b9d-435868d33e0e-kube-api-access-4csvd\") pod \"openstackclient\" (UID: \"c65652e6-704f-4f88-9b9d-435868d33e0e\") " pod="openstack/openstackclient" Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.179313 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.483517 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-6866f4d6b8-5wp55" podUID="f65b2092-9992-4e4d-be14-6ea85af840a0" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.164:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.483724 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-6866f4d6b8-5wp55" podUID="f65b2092-9992-4e4d-be14-6ea85af840a0" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.164:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.603714 4631 util.go:30] "No sandbox for pod can be found. 
Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.608003 4631 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130" podUID="c65652e6-704f-4f88-9b9d-435868d33e0e"
Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.619770 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.760798 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-openstack-config\") pod \"7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130\" (UID: \"7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130\") "
Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.760866 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9qdgv\" (UniqueName: \"kubernetes.io/projected/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-kube-api-access-9qdgv\") pod \"7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130\" (UID: \"7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130\") "
Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.760933 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-openstack-config-secret\") pod \"7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130\" (UID: \"7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130\") "
Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.761143 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-combined-ca-bundle\") pod \"7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130\" (UID: \"7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130\") "
Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.761942 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130" (UID: "7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.762733 4631 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-openstack-config\") on node \"crc\" DevicePath \"\""
Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.768881 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130" (UID: "7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.772619 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-kube-api-access-9qdgv" (OuterVolumeSpecName: "kube-api-access-9qdgv") pod "7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130" (UID: "7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130"). InnerVolumeSpecName "kube-api-access-9qdgv".
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.790529 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130" (UID: "7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.844438 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.866623 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.866661 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9qdgv\" (UniqueName: \"kubernetes.io/projected/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-kube-api-access-9qdgv\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.866672 4631 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.878894 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 04 17:51:15 crc kubenswrapper[4631]: I1204 17:51:15.901894 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:51:16 crc kubenswrapper[4631]: I1204 17:51:16.249002 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130" path="/var/lib/kubelet/pods/7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130/volumes" Dec 04 17:51:16 crc kubenswrapper[4631]: I1204 17:51:16.612814 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"c65652e6-704f-4f88-9b9d-435868d33e0e","Type":"ContainerStarted","Data":"6fffb49f820f3bac36909725959fca489fb2f355dfcda9a90f53adab7fb23ca0"} Dec 04 17:51:16 crc kubenswrapper[4631]: I1204 17:51:16.612831 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 04 17:51:16 crc kubenswrapper[4631]: I1204 17:51:16.619109 4631 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="7f4c6ac5-0d22-4bcf-94e4-eab3c1cce130" podUID="c65652e6-704f-4f88-9b9d-435868d33e0e" Dec 04 17:51:17 crc kubenswrapper[4631]: I1204 17:51:17.010690 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="953bfb33-b2e6-421f-b29f-127c1406800b" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.163:8776/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 04 17:51:17 crc kubenswrapper[4631]: I1204 17:51:17.820866 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:19 crc kubenswrapper[4631]: I1204 17:51:19.035943 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6866f4d6b8-5wp55" Dec 04 17:51:19 crc kubenswrapper[4631]: I1204 17:51:19.126880 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-598cb64fd8-27j5s"] Dec 04 17:51:19 crc kubenswrapper[4631]: I1204 17:51:19.127727 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-598cb64fd8-27j5s" podUID="28ff9a8c-f5dd-4b98-822a-a405abb26dd3" containerName="barbican-api-log" containerID="cri-o://a8c762adc1022485f5f5d7d601e4e4253e7583933a6fa8ed41bbf327631801ed" gracePeriod=30 Dec 04 17:51:19 crc kubenswrapper[4631]: I1204 17:51:19.128122 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-598cb64fd8-27j5s" podUID="28ff9a8c-f5dd-4b98-822a-a405abb26dd3" containerName="barbican-api" containerID="cri-o://a7c532fa3d9123117c778d4b946a33d72af49a4e1f313c71832372c8fb8a5546" gracePeriod=30 Dec 04 17:51:19 crc kubenswrapper[4631]: I1204 17:51:19.688208 4631 generic.go:334] "Generic (PLEG): container finished" podID="28ff9a8c-f5dd-4b98-822a-a405abb26dd3" containerID="a8c762adc1022485f5f5d7d601e4e4253e7583933a6fa8ed41bbf327631801ed" exitCode=143 Dec 04 17:51:19 crc kubenswrapper[4631]: I1204 17:51:19.688252 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-598cb64fd8-27j5s" event={"ID":"28ff9a8c-f5dd-4b98-822a-a405abb26dd3","Type":"ContainerDied","Data":"a8c762adc1022485f5f5d7d601e4e4253e7583933a6fa8ed41bbf327631801ed"} Dec 04 17:51:21 crc kubenswrapper[4631]: I1204 17:51:21.627751 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 04 17:51:22 crc kubenswrapper[4631]: I1204 17:51:22.052526 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="953bfb33-b2e6-421f-b29f-127c1406800b" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.163:8776/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 04 17:51:22 crc kubenswrapper[4631]: I1204 17:51:22.379344 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-598cb64fd8-27j5s" podUID="28ff9a8c-f5dd-4b98-822a-a405abb26dd3" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": read tcp 10.217.0.2:47712->10.217.0.159:9311: read: connection reset by peer" Dec 04 17:51:22 crc kubenswrapper[4631]: I1204 17:51:22.379410 4631 
prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-598cb64fd8-27j5s" podUID="28ff9a8c-f5dd-4b98-822a-a405abb26dd3" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": read tcp 10.217.0.2:47724->10.217.0.159:9311: read: connection reset by peer" Dec 04 17:51:22 crc kubenswrapper[4631]: I1204 17:51:22.875488 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:51:22 crc kubenswrapper[4631]: I1204 17:51:22.875534 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:51:22 crc kubenswrapper[4631]: I1204 17:51:22.878024 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7b99dd8d64-9nrvl" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.143:8443: connect: connection refused" Dec 04 17:51:22 crc kubenswrapper[4631]: I1204 17:51:22.999952 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:51:22 crc kubenswrapper[4631]: I1204 17:51:23.000003 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.615961 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.719951 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7p5x\" (UniqueName: \"kubernetes.io/projected/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-kube-api-access-x7p5x\") pod \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\" (UID: \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\") " Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.720257 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-config-data-custom\") pod \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\" (UID: \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\") " Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.720285 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-combined-ca-bundle\") pod \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\" (UID: \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\") " Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.720341 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-logs\") pod \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\" (UID: \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\") " Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.720387 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-config-data\") pod \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\" (UID: \"28ff9a8c-f5dd-4b98-822a-a405abb26dd3\") " Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.721137 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-logs" (OuterVolumeSpecName: "logs") pod "28ff9a8c-f5dd-4b98-822a-a405abb26dd3" (UID: "28ff9a8c-f5dd-4b98-822a-a405abb26dd3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.736687 4631 generic.go:334] "Generic (PLEG): container finished" podID="28ff9a8c-f5dd-4b98-822a-a405abb26dd3" containerID="a7c532fa3d9123117c778d4b946a33d72af49a4e1f313c71832372c8fb8a5546" exitCode=0 Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.736872 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-598cb64fd8-27j5s" event={"ID":"28ff9a8c-f5dd-4b98-822a-a405abb26dd3","Type":"ContainerDied","Data":"a7c532fa3d9123117c778d4b946a33d72af49a4e1f313c71832372c8fb8a5546"} Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.737013 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-598cb64fd8-27j5s" event={"ID":"28ff9a8c-f5dd-4b98-822a-a405abb26dd3","Type":"ContainerDied","Data":"6507895a4c80799a49d7889c7137363d44d6d4a8ceff7f7307ec8e3bc8c63a5c"} Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.736938 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-598cb64fd8-27j5s" Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.737080 4631 scope.go:117] "RemoveContainer" containerID="a7c532fa3d9123117c778d4b946a33d72af49a4e1f313c71832372c8fb8a5546" Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.819494 4631 scope.go:117] "RemoveContainer" containerID="a8c762adc1022485f5f5d7d601e4e4253e7583933a6fa8ed41bbf327631801ed" Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.822157 4631 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-logs\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.825677 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-kube-api-access-x7p5x" (OuterVolumeSpecName: "kube-api-access-x7p5x") pod "28ff9a8c-f5dd-4b98-822a-a405abb26dd3" (UID: "28ff9a8c-f5dd-4b98-822a-a405abb26dd3"). InnerVolumeSpecName "kube-api-access-x7p5x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.832656 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "28ff9a8c-f5dd-4b98-822a-a405abb26dd3" (UID: "28ff9a8c-f5dd-4b98-822a-a405abb26dd3"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.839561 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "28ff9a8c-f5dd-4b98-822a-a405abb26dd3" (UID: "28ff9a8c-f5dd-4b98-822a-a405abb26dd3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.880335 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-config-data" (OuterVolumeSpecName: "config-data") pod "28ff9a8c-f5dd-4b98-822a-a405abb26dd3" (UID: "28ff9a8c-f5dd-4b98-822a-a405abb26dd3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.923841 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7p5x\" (UniqueName: \"kubernetes.io/projected/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-kube-api-access-x7p5x\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.923903 4631 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.923916 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.923930 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28ff9a8c-f5dd-4b98-822a-a405abb26dd3-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.931589 4631 scope.go:117] "RemoveContainer" containerID="a7c532fa3d9123117c778d4b946a33d72af49a4e1f313c71832372c8fb8a5546" Dec 04 17:51:23 crc kubenswrapper[4631]: E1204 17:51:23.932168 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7c532fa3d9123117c778d4b946a33d72af49a4e1f313c71832372c8fb8a5546\": container with ID starting with a7c532fa3d9123117c778d4b946a33d72af49a4e1f313c71832372c8fb8a5546 not found: ID does not exist" containerID="a7c532fa3d9123117c778d4b946a33d72af49a4e1f313c71832372c8fb8a5546" Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.932308 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7c532fa3d9123117c778d4b946a33d72af49a4e1f313c71832372c8fb8a5546"} err="failed to get container status \"a7c532fa3d9123117c778d4b946a33d72af49a4e1f313c71832372c8fb8a5546\": rpc error: code = NotFound desc = could not find container \"a7c532fa3d9123117c778d4b946a33d72af49a4e1f313c71832372c8fb8a5546\": container with ID starting with a7c532fa3d9123117c778d4b946a33d72af49a4e1f313c71832372c8fb8a5546 not found: ID does not exist" Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.932441 4631 scope.go:117] "RemoveContainer" containerID="a8c762adc1022485f5f5d7d601e4e4253e7583933a6fa8ed41bbf327631801ed" Dec 04 17:51:23 crc kubenswrapper[4631]: E1204 17:51:23.937759 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8c762adc1022485f5f5d7d601e4e4253e7583933a6fa8ed41bbf327631801ed\": container with ID starting with a8c762adc1022485f5f5d7d601e4e4253e7583933a6fa8ed41bbf327631801ed not found: ID does not exist" containerID="a8c762adc1022485f5f5d7d601e4e4253e7583933a6fa8ed41bbf327631801ed" Dec 04 17:51:23 crc kubenswrapper[4631]: I1204 17:51:23.937977 4631 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8c762adc1022485f5f5d7d601e4e4253e7583933a6fa8ed41bbf327631801ed"} err="failed to get container status \"a8c762adc1022485f5f5d7d601e4e4253e7583933a6fa8ed41bbf327631801ed\": rpc error: code = NotFound desc = could not find container \"a8c762adc1022485f5f5d7d601e4e4253e7583933a6fa8ed41bbf327631801ed\": container with ID starting with a8c762adc1022485f5f5d7d601e4e4253e7583933a6fa8ed41bbf327631801ed not found: ID does not exist" Dec 04 17:51:24 crc kubenswrapper[4631]: I1204 17:51:24.077347 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-598cb64fd8-27j5s"] Dec 04 17:51:24 crc kubenswrapper[4631]: I1204 17:51:24.087723 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-598cb64fd8-27j5s"] Dec 04 17:51:24 crc kubenswrapper[4631]: I1204 17:51:24.250920 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28ff9a8c-f5dd-4b98-822a-a405abb26dd3" path="/var/lib/kubelet/pods/28ff9a8c-f5dd-4b98-822a-a405abb26dd3/volumes" Dec 04 17:51:25 crc kubenswrapper[4631]: I1204 17:51:25.247861 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 04 17:51:26 crc kubenswrapper[4631]: I1204 17:51:26.484185 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.737446 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-5dd47bd8d5-qcz5l"] Dec 04 17:51:27 crc kubenswrapper[4631]: E1204 17:51:27.738151 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28ff9a8c-f5dd-4b98-822a-a405abb26dd3" containerName="barbican-api" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.738164 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="28ff9a8c-f5dd-4b98-822a-a405abb26dd3" containerName="barbican-api" Dec 04 17:51:27 crc kubenswrapper[4631]: E1204 17:51:27.738173 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28ff9a8c-f5dd-4b98-822a-a405abb26dd3" containerName="barbican-api-log" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.738179 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="28ff9a8c-f5dd-4b98-822a-a405abb26dd3" containerName="barbican-api-log" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.738337 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="28ff9a8c-f5dd-4b98-822a-a405abb26dd3" containerName="barbican-api-log" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.738353 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="28ff9a8c-f5dd-4b98-822a-a405abb26dd3" containerName="barbican-api" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.739279 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.742271 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.742582 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.742751 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.768199 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5dd47bd8d5-qcz5l"] Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.809616 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-etc-swift\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.809672 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-internal-tls-certs\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.809803 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-run-httpd\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.809893 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-log-httpd\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.809938 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-combined-ca-bundle\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.809961 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-config-data\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.809977 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-public-tls-certs\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " 
pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.810034 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w96hx\" (UniqueName: \"kubernetes.io/projected/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-kube-api-access-w96hx\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.912649 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-etc-swift\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.912707 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-internal-tls-certs\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.912765 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-run-httpd\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.912794 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-log-httpd\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.912830 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-combined-ca-bundle\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.912848 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-config-data\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.912864 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-public-tls-certs\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.912893 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w96hx\" (UniqueName: \"kubernetes.io/projected/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-kube-api-access-w96hx\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " 
pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.913318 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-run-httpd\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.913874 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-log-httpd\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.931009 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-etc-swift\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.931871 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-internal-tls-certs\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.932744 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-combined-ca-bundle\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.934982 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-config-data\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.938533 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-public-tls-certs\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:27 crc kubenswrapper[4631]: I1204 17:51:27.942112 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w96hx\" (UniqueName: \"kubernetes.io/projected/2f9535d3-d81d-4e55-bc05-f36a8dd6b731-kube-api-access-w96hx\") pod \"swift-proxy-5dd47bd8d5-qcz5l\" (UID: \"2f9535d3-d81d-4e55-bc05-f36a8dd6b731\") " pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:28 crc kubenswrapper[4631]: I1204 17:51:28.059878 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:30 crc kubenswrapper[4631]: I1204 17:51:30.385552 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:51:30 crc kubenswrapper[4631]: I1204 17:51:30.396129 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ec86a01c-35c1-4302-a746-843df2ed1e3b" containerName="ceilometer-central-agent" containerID="cri-o://3e58cf7e9dab6c6ea45ca31917359198c5987b2531259c32a12dcf51721b16ac" gracePeriod=30 Dec 04 17:51:30 crc kubenswrapper[4631]: I1204 17:51:30.396513 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ec86a01c-35c1-4302-a746-843df2ed1e3b" containerName="proxy-httpd" containerID="cri-o://854ad5ed9561f4ead7b07e9545917d2eeec7a88aff1bff5cf96f86d3ec728aff" gracePeriod=30 Dec 04 17:51:30 crc kubenswrapper[4631]: I1204 17:51:30.396527 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ec86a01c-35c1-4302-a746-843df2ed1e3b" containerName="ceilometer-notification-agent" containerID="cri-o://a932e7f1f91716b14d407320310f64ee2f8686dad7dd821f2c4301e07650ff7b" gracePeriod=30 Dec 04 17:51:30 crc kubenswrapper[4631]: I1204 17:51:30.396608 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ec86a01c-35c1-4302-a746-843df2ed1e3b" containerName="sg-core" containerID="cri-o://677807845b9688381844e5641183e72834b7c6d3b5c96ee2a8cd4779925e5ec5" gracePeriod=30 Dec 04 17:51:30 crc kubenswrapper[4631]: I1204 17:51:30.831475 4631 generic.go:334] "Generic (PLEG): container finished" podID="ec86a01c-35c1-4302-a746-843df2ed1e3b" containerID="854ad5ed9561f4ead7b07e9545917d2eeec7a88aff1bff5cf96f86d3ec728aff" exitCode=0 Dec 04 17:51:30 crc kubenswrapper[4631]: I1204 17:51:30.831983 4631 generic.go:334] "Generic (PLEG): container finished" podID="ec86a01c-35c1-4302-a746-843df2ed1e3b" containerID="677807845b9688381844e5641183e72834b7c6d3b5c96ee2a8cd4779925e5ec5" exitCode=2 Dec 04 17:51:30 crc kubenswrapper[4631]: I1204 17:51:30.832003 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec86a01c-35c1-4302-a746-843df2ed1e3b","Type":"ContainerDied","Data":"854ad5ed9561f4ead7b07e9545917d2eeec7a88aff1bff5cf96f86d3ec728aff"} Dec 04 17:51:30 crc kubenswrapper[4631]: I1204 17:51:30.832027 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec86a01c-35c1-4302-a746-843df2ed1e3b","Type":"ContainerDied","Data":"677807845b9688381844e5641183e72834b7c6d3b5c96ee2a8cd4779925e5ec5"} Dec 04 17:51:31 crc kubenswrapper[4631]: I1204 17:51:31.850924 4631 generic.go:334] "Generic (PLEG): container finished" podID="953bfb33-b2e6-421f-b29f-127c1406800b" containerID="230820dcad8dfa829965aa50b8c72b5b8a64be364ca1791e1f88afd12d0e9d31" exitCode=137 Dec 04 17:51:31 crc kubenswrapper[4631]: I1204 17:51:31.850994 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"953bfb33-b2e6-421f-b29f-127c1406800b","Type":"ContainerDied","Data":"230820dcad8dfa829965aa50b8c72b5b8a64be364ca1791e1f88afd12d0e9d31"} Dec 04 17:51:31 crc kubenswrapper[4631]: I1204 17:51:31.858997 4631 generic.go:334] "Generic (PLEG): container finished" podID="ec86a01c-35c1-4302-a746-843df2ed1e3b" containerID="3e58cf7e9dab6c6ea45ca31917359198c5987b2531259c32a12dcf51721b16ac" exitCode=0 Dec 04 17:51:31 crc 
kubenswrapper[4631]: I1204 17:51:31.859042 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec86a01c-35c1-4302-a746-843df2ed1e3b","Type":"ContainerDied","Data":"3e58cf7e9dab6c6ea45ca31917359198c5987b2531259c32a12dcf51721b16ac"} Dec 04 17:51:31 crc kubenswrapper[4631]: I1204 17:51:31.924959 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="953bfb33-b2e6-421f-b29f-127c1406800b" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.163:8776/healthcheck\": dial tcp 10.217.0.163:8776: connect: connection refused" Dec 04 17:51:32 crc kubenswrapper[4631]: I1204 17:51:32.871384 4631 generic.go:334] "Generic (PLEG): container finished" podID="ec86a01c-35c1-4302-a746-843df2ed1e3b" containerID="a932e7f1f91716b14d407320310f64ee2f8686dad7dd821f2c4301e07650ff7b" exitCode=0 Dec 04 17:51:32 crc kubenswrapper[4631]: I1204 17:51:32.871567 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec86a01c-35c1-4302-a746-843df2ed1e3b","Type":"ContainerDied","Data":"a932e7f1f91716b14d407320310f64ee2f8686dad7dd821f2c4301e07650ff7b"} Dec 04 17:51:32 crc kubenswrapper[4631]: I1204 17:51:32.874972 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7b99dd8d64-9nrvl" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.143:8443: connect: connection refused" Dec 04 17:51:33 crc kubenswrapper[4631]: I1204 17:51:33.001805 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-77d5fd455b-8kwkp" podUID="78aafb4d-470c-477d-bfe6-5b7a29b79fc0" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Dec 04 17:51:33 crc kubenswrapper[4631]: E1204 17:51:33.593807 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified" Dec 04 17:51:33 crc kubenswrapper[4631]: E1204 17:51:33.594213 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:openstackclient,Image:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,Command:[/bin/sleep],Args:[infinity],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nd7h86h9h54h97hcch64fh555h67h89h594h5f5h595hf8h67ch5d9hc9h579h6bh68bh548h57ch547h57h88h5b6h5fh579h696h58ch676hf5q,ValueFrom:nil,},EnvVar{Name:OS_CLOUD,Value:default,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_HOST,Value:metric-storage-prometheus.openstack.svc,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_PORT,Value:9090,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openstack-config,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/cloudrc,SubPath:cloudrc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4csvd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42401,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:*42401,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstackclient_openstack(c65652e6-704f-4f88-9b9d-435868d33e0e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 17:51:33 crc kubenswrapper[4631]: E1204 17:51:33.595741 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstackclient" podUID="c65652e6-704f-4f88-9b9d-435868d33e0e" Dec 04 17:51:33 crc kubenswrapper[4631]: E1204 17:51:33.896484 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified\\\"\"" pod="openstack/openstackclient" podUID="c65652e6-704f-4f88-9b9d-435868d33e0e" Dec 04 17:51:33 crc kubenswrapper[4631]: I1204 17:51:33.984797 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.065594 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/953bfb33-b2e6-421f-b29f-127c1406800b-etc-machine-id\") pod \"953bfb33-b2e6-421f-b29f-127c1406800b\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.065646 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-config-data\") pod \"953bfb33-b2e6-421f-b29f-127c1406800b\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.065671 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9nrj8\" (UniqueName: \"kubernetes.io/projected/953bfb33-b2e6-421f-b29f-127c1406800b-kube-api-access-9nrj8\") pod \"953bfb33-b2e6-421f-b29f-127c1406800b\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.065742 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-combined-ca-bundle\") pod \"953bfb33-b2e6-421f-b29f-127c1406800b\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.065776 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/953bfb33-b2e6-421f-b29f-127c1406800b-logs\") pod \"953bfb33-b2e6-421f-b29f-127c1406800b\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.065860 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-config-data-custom\") pod \"953bfb33-b2e6-421f-b29f-127c1406800b\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.065966 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-scripts\") pod \"953bfb33-b2e6-421f-b29f-127c1406800b\" (UID: \"953bfb33-b2e6-421f-b29f-127c1406800b\") " Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.067117 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/953bfb33-b2e6-421f-b29f-127c1406800b-logs" (OuterVolumeSpecName: "logs") pod "953bfb33-b2e6-421f-b29f-127c1406800b" (UID: "953bfb33-b2e6-421f-b29f-127c1406800b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.067747 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/953bfb33-b2e6-421f-b29f-127c1406800b-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "953bfb33-b2e6-421f-b29f-127c1406800b" (UID: "953bfb33-b2e6-421f-b29f-127c1406800b"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.079915 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-scripts" (OuterVolumeSpecName: "scripts") pod "953bfb33-b2e6-421f-b29f-127c1406800b" (UID: "953bfb33-b2e6-421f-b29f-127c1406800b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.083552 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/953bfb33-b2e6-421f-b29f-127c1406800b-kube-api-access-9nrj8" (OuterVolumeSpecName: "kube-api-access-9nrj8") pod "953bfb33-b2e6-421f-b29f-127c1406800b" (UID: "953bfb33-b2e6-421f-b29f-127c1406800b"). InnerVolumeSpecName "kube-api-access-9nrj8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.088607 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "953bfb33-b2e6-421f-b29f-127c1406800b" (UID: "953bfb33-b2e6-421f-b29f-127c1406800b"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.156144 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "953bfb33-b2e6-421f-b29f-127c1406800b" (UID: "953bfb33-b2e6-421f-b29f-127c1406800b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.166302 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.167491 4631 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.167512 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.167520 4631 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/953bfb33-b2e6-421f-b29f-127c1406800b-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.167529 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9nrj8\" (UniqueName: \"kubernetes.io/projected/953bfb33-b2e6-421f-b29f-127c1406800b-kube-api-access-9nrj8\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.167539 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.167547 4631 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/953bfb33-b2e6-421f-b29f-127c1406800b-logs\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.204295 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-config-data" (OuterVolumeSpecName: "config-data") pod "953bfb33-b2e6-421f-b29f-127c1406800b" (UID: "953bfb33-b2e6-421f-b29f-127c1406800b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.268895 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec86a01c-35c1-4302-a746-843df2ed1e3b-run-httpd\") pod \"ec86a01c-35c1-4302-a746-843df2ed1e3b\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.269003 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-config-data\") pod \"ec86a01c-35c1-4302-a746-843df2ed1e3b\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.269071 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-scripts\") pod \"ec86a01c-35c1-4302-a746-843df2ed1e3b\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.269109 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrl2t\" (UniqueName: \"kubernetes.io/projected/ec86a01c-35c1-4302-a746-843df2ed1e3b-kube-api-access-lrl2t\") pod \"ec86a01c-35c1-4302-a746-843df2ed1e3b\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.269139 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-sg-core-conf-yaml\") pod \"ec86a01c-35c1-4302-a746-843df2ed1e3b\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.269163 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-combined-ca-bundle\") pod \"ec86a01c-35c1-4302-a746-843df2ed1e3b\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.269227 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec86a01c-35c1-4302-a746-843df2ed1e3b-log-httpd\") pod \"ec86a01c-35c1-4302-a746-843df2ed1e3b\" (UID: \"ec86a01c-35c1-4302-a746-843df2ed1e3b\") " Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.269582 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/953bfb33-b2e6-421f-b29f-127c1406800b-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.270330 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec86a01c-35c1-4302-a746-843df2ed1e3b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ec86a01c-35c1-4302-a746-843df2ed1e3b" (UID: "ec86a01c-35c1-4302-a746-843df2ed1e3b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.272940 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec86a01c-35c1-4302-a746-843df2ed1e3b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ec86a01c-35c1-4302-a746-843df2ed1e3b" (UID: "ec86a01c-35c1-4302-a746-843df2ed1e3b"). 
InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.275835 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec86a01c-35c1-4302-a746-843df2ed1e3b-kube-api-access-lrl2t" (OuterVolumeSpecName: "kube-api-access-lrl2t") pod "ec86a01c-35c1-4302-a746-843df2ed1e3b" (UID: "ec86a01c-35c1-4302-a746-843df2ed1e3b"). InnerVolumeSpecName "kube-api-access-lrl2t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.285668 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-scripts" (OuterVolumeSpecName: "scripts") pod "ec86a01c-35c1-4302-a746-843df2ed1e3b" (UID: "ec86a01c-35c1-4302-a746-843df2ed1e3b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.373072 4631 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec86a01c-35c1-4302-a746-843df2ed1e3b-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.373101 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.373113 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrl2t\" (UniqueName: \"kubernetes.io/projected/ec86a01c-35c1-4302-a746-843df2ed1e3b-kube-api-access-lrl2t\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.373127 4631 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec86a01c-35c1-4302-a746-843df2ed1e3b-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.382594 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ec86a01c-35c1-4302-a746-843df2ed1e3b" (UID: "ec86a01c-35c1-4302-a746-843df2ed1e3b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.454306 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec86a01c-35c1-4302-a746-843df2ed1e3b" (UID: "ec86a01c-35c1-4302-a746-843df2ed1e3b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.454599 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-config-data" (OuterVolumeSpecName: "config-data") pod "ec86a01c-35c1-4302-a746-843df2ed1e3b" (UID: "ec86a01c-35c1-4302-a746-843df2ed1e3b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.474741 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.474767 4631 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.474785 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec86a01c-35c1-4302-a746-843df2ed1e3b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.556544 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5dd47bd8d5-qcz5l"] Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.901551 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"953bfb33-b2e6-421f-b29f-127c1406800b","Type":"ContainerDied","Data":"d963f491bf34b1ac1611be619b08293558b7031f84b3af25e675598763a3e144"} Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.901876 4631 scope.go:117] "RemoveContainer" containerID="230820dcad8dfa829965aa50b8c72b5b8a64be364ca1791e1f88afd12d0e9d31" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.901567 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.902911 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" event={"ID":"2f9535d3-d81d-4e55-bc05-f36a8dd6b731","Type":"ContainerStarted","Data":"87e994d0d466f9005c19301950991b36da2c23251f504a71468c401568ad3ac7"} Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.909532 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec86a01c-35c1-4302-a746-843df2ed1e3b","Type":"ContainerDied","Data":"3730a193baa4b9803f54cec93f0df32aa11b00dd5cc4bb07142b2d87be8b3300"} Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.909609 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.920545 4631 scope.go:117] "RemoveContainer" containerID="c9fd9c2597ac06ed6eae0687923eaff9a8cb9fdf1673d02f2fe1db070567b28b" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.934978 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.941013 4631 scope.go:117] "RemoveContainer" containerID="854ad5ed9561f4ead7b07e9545917d2eeec7a88aff1bff5cf96f86d3ec728aff" Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.955564 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.970614 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.993526 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:51:34 crc kubenswrapper[4631]: I1204 17:51:34.994511 4631 scope.go:117] "RemoveContainer" containerID="677807845b9688381844e5641183e72834b7c6d3b5c96ee2a8cd4779925e5ec5" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.027341 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 04 17:51:35 crc kubenswrapper[4631]: E1204 17:51:35.027766 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec86a01c-35c1-4302-a746-843df2ed1e3b" containerName="ceilometer-central-agent" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.027780 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec86a01c-35c1-4302-a746-843df2ed1e3b" containerName="ceilometer-central-agent" Dec 04 17:51:35 crc kubenswrapper[4631]: E1204 17:51:35.027805 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec86a01c-35c1-4302-a746-843df2ed1e3b" containerName="sg-core" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.027813 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec86a01c-35c1-4302-a746-843df2ed1e3b" containerName="sg-core" Dec 04 17:51:35 crc kubenswrapper[4631]: E1204 17:51:35.027820 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec86a01c-35c1-4302-a746-843df2ed1e3b" containerName="ceilometer-notification-agent" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.027826 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec86a01c-35c1-4302-a746-843df2ed1e3b" containerName="ceilometer-notification-agent" Dec 04 17:51:35 crc kubenswrapper[4631]: E1204 17:51:35.027844 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec86a01c-35c1-4302-a746-843df2ed1e3b" containerName="proxy-httpd" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.027850 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec86a01c-35c1-4302-a746-843df2ed1e3b" containerName="proxy-httpd" Dec 04 17:51:35 crc kubenswrapper[4631]: E1204 17:51:35.027862 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="953bfb33-b2e6-421f-b29f-127c1406800b" containerName="cinder-api" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.027867 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="953bfb33-b2e6-421f-b29f-127c1406800b" containerName="cinder-api" Dec 04 17:51:35 crc kubenswrapper[4631]: E1204 17:51:35.027875 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="953bfb33-b2e6-421f-b29f-127c1406800b" containerName="cinder-api-log" Dec 04 17:51:35 crc 
kubenswrapper[4631]: I1204 17:51:35.027880 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="953bfb33-b2e6-421f-b29f-127c1406800b" containerName="cinder-api-log" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.028043 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec86a01c-35c1-4302-a746-843df2ed1e3b" containerName="ceilometer-central-agent" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.028068 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec86a01c-35c1-4302-a746-843df2ed1e3b" containerName="proxy-httpd" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.028081 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="953bfb33-b2e6-421f-b29f-127c1406800b" containerName="cinder-api-log" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.028093 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec86a01c-35c1-4302-a746-843df2ed1e3b" containerName="sg-core" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.028103 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec86a01c-35c1-4302-a746-843df2ed1e3b" containerName="ceilometer-notification-agent" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.028117 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="953bfb33-b2e6-421f-b29f-127c1406800b" containerName="cinder-api" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.029043 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.038813 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.038888 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.048446 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.053939 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.055553 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.055683 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.062320 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.062528 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.065877 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.071739 4631 scope.go:117] "RemoveContainer" containerID="a932e7f1f91716b14d407320310f64ee2f8686dad7dd821f2c4301e07650ff7b" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.117657 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b9d56f1-c2cf-471c-934b-15a0497af44b-public-tls-certs\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.118467 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6b9d56f1-c2cf-471c-934b-15a0497af44b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.118502 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b9d56f1-c2cf-471c-934b-15a0497af44b-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.118566 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b9d56f1-c2cf-471c-934b-15a0497af44b-scripts\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.118722 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b9d56f1-c2cf-471c-934b-15a0497af44b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.118770 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b9d56f1-c2cf-471c-934b-15a0497af44b-logs\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.118810 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nk9n\" (UniqueName: 
\"kubernetes.io/projected/6b9d56f1-c2cf-471c-934b-15a0497af44b-kube-api-access-4nk9n\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.118871 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b9d56f1-c2cf-471c-934b-15a0497af44b-config-data-custom\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.118892 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b9d56f1-c2cf-471c-934b-15a0497af44b-config-data\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.130830 4631 scope.go:117] "RemoveContainer" containerID="3e58cf7e9dab6c6ea45ca31917359198c5987b2531259c32a12dcf51721b16ac" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.220692 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fj4gk\" (UniqueName: \"kubernetes.io/projected/5874bbe7-300c-47c7-8273-dd48c0572ff8-kube-api-access-fj4gk\") pod \"ceilometer-0\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.220948 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.221001 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b9d56f1-c2cf-471c-934b-15a0497af44b-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.221041 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6b9d56f1-c2cf-471c-934b-15a0497af44b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.221070 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.221093 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-scripts\") pod \"ceilometer-0\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.221151 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/5874bbe7-300c-47c7-8273-dd48c0572ff8-run-httpd\") pod \"ceilometer-0\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.221178 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b9d56f1-c2cf-471c-934b-15a0497af44b-scripts\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.221253 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-config-data\") pod \"ceilometer-0\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.221279 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5874bbe7-300c-47c7-8273-dd48c0572ff8-log-httpd\") pod \"ceilometer-0\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.221365 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b9d56f1-c2cf-471c-934b-15a0497af44b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.221479 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b9d56f1-c2cf-471c-934b-15a0497af44b-logs\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.221572 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nk9n\" (UniqueName: \"kubernetes.io/projected/6b9d56f1-c2cf-471c-934b-15a0497af44b-kube-api-access-4nk9n\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.221625 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b9d56f1-c2cf-471c-934b-15a0497af44b-config-data-custom\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.221645 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b9d56f1-c2cf-471c-934b-15a0497af44b-config-data\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.221702 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b9d56f1-c2cf-471c-934b-15a0497af44b-public-tls-certs\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.222396 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/6b9d56f1-c2cf-471c-934b-15a0497af44b-logs\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.222449 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6b9d56f1-c2cf-471c-934b-15a0497af44b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.224770 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b9d56f1-c2cf-471c-934b-15a0497af44b-scripts\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.230001 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b9d56f1-c2cf-471c-934b-15a0497af44b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.231684 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b9d56f1-c2cf-471c-934b-15a0497af44b-config-data\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.232915 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b9d56f1-c2cf-471c-934b-15a0497af44b-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.242942 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b9d56f1-c2cf-471c-934b-15a0497af44b-config-data-custom\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.243156 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b9d56f1-c2cf-471c-934b-15a0497af44b-public-tls-certs\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.244975 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nk9n\" (UniqueName: \"kubernetes.io/projected/6b9d56f1-c2cf-471c-934b-15a0497af44b-kube-api-access-4nk9n\") pod \"cinder-api-0\" (UID: \"6b9d56f1-c2cf-471c-934b-15a0497af44b\") " pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.323013 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fj4gk\" (UniqueName: \"kubernetes.io/projected/5874bbe7-300c-47c7-8273-dd48c0572ff8-kube-api-access-fj4gk\") pod \"ceilometer-0\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.323086 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.323121 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.323143 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-scripts\") pod \"ceilometer-0\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.323170 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5874bbe7-300c-47c7-8273-dd48c0572ff8-run-httpd\") pod \"ceilometer-0\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.323203 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-config-data\") pod \"ceilometer-0\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.323218 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5874bbe7-300c-47c7-8273-dd48c0572ff8-log-httpd\") pod \"ceilometer-0\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.323724 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5874bbe7-300c-47c7-8273-dd48c0572ff8-log-httpd\") pod \"ceilometer-0\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.323794 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5874bbe7-300c-47c7-8273-dd48c0572ff8-run-httpd\") pod \"ceilometer-0\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.329028 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.329356 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-config-data\") pod \"ceilometer-0\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.329827 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-sg-core-conf-yaml\") pod 
\"ceilometer-0\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.330890 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-scripts\") pod \"ceilometer-0\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.345094 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fj4gk\" (UniqueName: \"kubernetes.io/projected/5874bbe7-300c-47c7-8273-dd48c0572ff8-kube-api-access-fj4gk\") pod \"ceilometer-0\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.350935 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.386064 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.921055 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.976948 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" event={"ID":"2f9535d3-d81d-4e55-bc05-f36a8dd6b731","Type":"ContainerStarted","Data":"283901ac3c676621bfa42fbae1269fc17e94f4cb4f011a92351f954b2a0abda8"} Dec 04 17:51:35 crc kubenswrapper[4631]: I1204 17:51:35.977803 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" event={"ID":"2f9535d3-d81d-4e55-bc05-f36a8dd6b731","Type":"ContainerStarted","Data":"66fab7c3ef380ee7d722d3cebc3229d5dadc22d439dcf420c2427668a38a826e"} Dec 04 17:51:36 crc kubenswrapper[4631]: I1204 17:51:36.047816 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 04 17:51:36 crc kubenswrapper[4631]: W1204 17:51:36.053770 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b9d56f1_c2cf_471c_934b_15a0497af44b.slice/crio-53ae133ada539d911e18ab40e860a4982480a4c9f8ccb673b2c4c60d4017875b WatchSource:0}: Error finding container 53ae133ada539d911e18ab40e860a4982480a4c9f8ccb673b2c4c60d4017875b: Status 404 returned error can't find the container with id 53ae133ada539d911e18ab40e860a4982480a4c9f8ccb673b2c4c60d4017875b Dec 04 17:51:36 crc kubenswrapper[4631]: I1204 17:51:36.251691 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="953bfb33-b2e6-421f-b29f-127c1406800b" path="/var/lib/kubelet/pods/953bfb33-b2e6-421f-b29f-127c1406800b/volumes" Dec 04 17:51:36 crc kubenswrapper[4631]: I1204 17:51:36.253092 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec86a01c-35c1-4302-a746-843df2ed1e3b" path="/var/lib/kubelet/pods/ec86a01c-35c1-4302-a746-843df2ed1e3b/volumes" Dec 04 17:51:37 crc kubenswrapper[4631]: I1204 17:51:37.003445 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6b9d56f1-c2cf-471c-934b-15a0497af44b","Type":"ContainerStarted","Data":"5c4171ffea4ec112e430ce3665bc27d75fdd73ca8eaf31e890553696ae2cf5d4"} Dec 04 17:51:37 crc kubenswrapper[4631]: I1204 17:51:37.004442 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"6b9d56f1-c2cf-471c-934b-15a0497af44b","Type":"ContainerStarted","Data":"53ae133ada539d911e18ab40e860a4982480a4c9f8ccb673b2c4c60d4017875b"} Dec 04 17:51:37 crc kubenswrapper[4631]: I1204 17:51:37.006721 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5874bbe7-300c-47c7-8273-dd48c0572ff8","Type":"ContainerStarted","Data":"308fa095474135faa7ad737dccc1e0bc3e03405f95497687e68ae8d2763967e7"} Dec 04 17:51:37 crc kubenswrapper[4631]: I1204 17:51:37.006873 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:37 crc kubenswrapper[4631]: I1204 17:51:37.006905 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:37 crc kubenswrapper[4631]: I1204 17:51:37.029855 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" podStartSLOduration=10.029836867 podStartE2EDuration="10.029836867s" podCreationTimestamp="2025-12-04 17:51:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:51:37.025433655 +0000 UTC m=+1427.057675653" watchObservedRunningTime="2025-12-04 17:51:37.029836867 +0000 UTC m=+1427.062078865" Dec 04 17:51:39 crc kubenswrapper[4631]: I1204 17:51:39.027813 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6b9d56f1-c2cf-471c-934b-15a0497af44b","Type":"ContainerStarted","Data":"be3df5316ad0772d88668ff244a37a47882a558adc2dcd3b3a35da712db1d568"} Dec 04 17:51:39 crc kubenswrapper[4631]: I1204 17:51:39.028343 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 04 17:51:39 crc kubenswrapper[4631]: I1204 17:51:39.038005 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5874bbe7-300c-47c7-8273-dd48c0572ff8","Type":"ContainerStarted","Data":"97fe0c5df319438c37a01746a9ae702a3fb3237773d4313f763c78780a0b7073"} Dec 04 17:51:39 crc kubenswrapper[4631]: I1204 17:51:39.039667 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:51:39 crc kubenswrapper[4631]: I1204 17:51:39.053796 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.053775932 podStartE2EDuration="5.053775932s" podCreationTimestamp="2025-12-04 17:51:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:51:39.052546518 +0000 UTC m=+1429.084788526" watchObservedRunningTime="2025-12-04 17:51:39.053775932 +0000 UTC m=+1429.086017940" Dec 04 17:51:39 crc kubenswrapper[4631]: I1204 17:51:39.098988 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 04 17:51:39 crc kubenswrapper[4631]: I1204 17:51:39.099287 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="f017066e-42ab-4e68-891d-5df98da845a9" containerName="kube-state-metrics" containerID="cri-o://75cf2585cda179f2ffdf506784f140fbc680a692784338a16fc4282e00ecd9ff" gracePeriod=30 Dec 04 17:51:40 crc kubenswrapper[4631]: I1204 17:51:40.049097 4631 generic.go:334] "Generic (PLEG): container finished" podID="f017066e-42ab-4e68-891d-5df98da845a9" 
containerID="75cf2585cda179f2ffdf506784f140fbc680a692784338a16fc4282e00ecd9ff" exitCode=2 Dec 04 17:51:40 crc kubenswrapper[4631]: I1204 17:51:40.049440 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f017066e-42ab-4e68-891d-5df98da845a9","Type":"ContainerDied","Data":"75cf2585cda179f2ffdf506784f140fbc680a692784338a16fc4282e00ecd9ff"} Dec 04 17:51:40 crc kubenswrapper[4631]: I1204 17:51:40.059470 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5874bbe7-300c-47c7-8273-dd48c0572ff8","Type":"ContainerStarted","Data":"b2a4310b414e01a09133eb24ee895996e68b35e09da929bf646021704116489c"} Dec 04 17:51:40 crc kubenswrapper[4631]: I1204 17:51:40.184871 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 04 17:51:40 crc kubenswrapper[4631]: I1204 17:51:40.264115 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gfw4d\" (UniqueName: \"kubernetes.io/projected/f017066e-42ab-4e68-891d-5df98da845a9-kube-api-access-gfw4d\") pod \"f017066e-42ab-4e68-891d-5df98da845a9\" (UID: \"f017066e-42ab-4e68-891d-5df98da845a9\") " Dec 04 17:51:40 crc kubenswrapper[4631]: I1204 17:51:40.323018 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f017066e-42ab-4e68-891d-5df98da845a9-kube-api-access-gfw4d" (OuterVolumeSpecName: "kube-api-access-gfw4d") pod "f017066e-42ab-4e68-891d-5df98da845a9" (UID: "f017066e-42ab-4e68-891d-5df98da845a9"). InnerVolumeSpecName "kube-api-access-gfw4d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:51:40 crc kubenswrapper[4631]: I1204 17:51:40.397470 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gfw4d\" (UniqueName: \"kubernetes.io/projected/f017066e-42ab-4e68-891d-5df98da845a9-kube-api-access-gfw4d\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.072992 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f017066e-42ab-4e68-891d-5df98da845a9","Type":"ContainerDied","Data":"ef99ad66b5c7c1d061580f8b49ae0c05977d3450614e7fd77fa196592afbff06"} Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.073765 4631 scope.go:117] "RemoveContainer" containerID="75cf2585cda179f2ffdf506784f140fbc680a692784338a16fc4282e00ecd9ff" Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.073955 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.134665 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.158444 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.167560 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 04 17:51:41 crc kubenswrapper[4631]: E1204 17:51:41.168015 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f017066e-42ab-4e68-891d-5df98da845a9" containerName="kube-state-metrics" Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.168032 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="f017066e-42ab-4e68-891d-5df98da845a9" containerName="kube-state-metrics" Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.168245 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="f017066e-42ab-4e68-891d-5df98da845a9" containerName="kube-state-metrics" Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.170090 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.179430 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.179634 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.180304 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.212507 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/83a88a9d-413f-40ce-bae5-624b4cfe00c9-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"83a88a9d-413f-40ce-bae5-624b4cfe00c9\") " pod="openstack/kube-state-metrics-0" Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.212568 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/83a88a9d-413f-40ce-bae5-624b4cfe00c9-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"83a88a9d-413f-40ce-bae5-624b4cfe00c9\") " pod="openstack/kube-state-metrics-0" Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.212617 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83a88a9d-413f-40ce-bae5-624b4cfe00c9-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"83a88a9d-413f-40ce-bae5-624b4cfe00c9\") " pod="openstack/kube-state-metrics-0" Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.212660 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xpjf\" (UniqueName: \"kubernetes.io/projected/83a88a9d-413f-40ce-bae5-624b4cfe00c9-kube-api-access-4xpjf\") pod \"kube-state-metrics-0\" (UID: \"83a88a9d-413f-40ce-bae5-624b4cfe00c9\") " pod="openstack/kube-state-metrics-0" Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.314072 4631 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/83a88a9d-413f-40ce-bae5-624b4cfe00c9-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"83a88a9d-413f-40ce-bae5-624b4cfe00c9\") " pod="openstack/kube-state-metrics-0" Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.314140 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/83a88a9d-413f-40ce-bae5-624b4cfe00c9-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"83a88a9d-413f-40ce-bae5-624b4cfe00c9\") " pod="openstack/kube-state-metrics-0" Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.314188 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83a88a9d-413f-40ce-bae5-624b4cfe00c9-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"83a88a9d-413f-40ce-bae5-624b4cfe00c9\") " pod="openstack/kube-state-metrics-0" Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.314215 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xpjf\" (UniqueName: \"kubernetes.io/projected/83a88a9d-413f-40ce-bae5-624b4cfe00c9-kube-api-access-4xpjf\") pod \"kube-state-metrics-0\" (UID: \"83a88a9d-413f-40ce-bae5-624b4cfe00c9\") " pod="openstack/kube-state-metrics-0" Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.321883 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83a88a9d-413f-40ce-bae5-624b4cfe00c9-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"83a88a9d-413f-40ce-bae5-624b4cfe00c9\") " pod="openstack/kube-state-metrics-0" Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.339112 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/83a88a9d-413f-40ce-bae5-624b4cfe00c9-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"83a88a9d-413f-40ce-bae5-624b4cfe00c9\") " pod="openstack/kube-state-metrics-0" Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.344679 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/83a88a9d-413f-40ce-bae5-624b4cfe00c9-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"83a88a9d-413f-40ce-bae5-624b4cfe00c9\") " pod="openstack/kube-state-metrics-0" Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.345962 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xpjf\" (UniqueName: \"kubernetes.io/projected/83a88a9d-413f-40ce-bae5-624b4cfe00c9-kube-api-access-4xpjf\") pod \"kube-state-metrics-0\" (UID: \"83a88a9d-413f-40ce-bae5-624b4cfe00c9\") " pod="openstack/kube-state-metrics-0" Dec 04 17:51:41 crc kubenswrapper[4631]: I1204 17:51:41.499818 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 04 17:51:42 crc kubenswrapper[4631]: I1204 17:51:42.008034 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 04 17:51:42 crc kubenswrapper[4631]: W1204 17:51:42.025798 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod83a88a9d_413f_40ce_bae5_624b4cfe00c9.slice/crio-c44725c719e73673deace22eeeef5ccc172d4950c95cb67c2672c96b53620409 WatchSource:0}: Error finding container c44725c719e73673deace22eeeef5ccc172d4950c95cb67c2672c96b53620409: Status 404 returned error can't find the container with id c44725c719e73673deace22eeeef5ccc172d4950c95cb67c2672c96b53620409 Dec 04 17:51:42 crc kubenswrapper[4631]: I1204 17:51:42.089885 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5874bbe7-300c-47c7-8273-dd48c0572ff8","Type":"ContainerStarted","Data":"5d23a814fa7dc32e18bd6fa7518d1569d214ddd1d4b3cac555630948b67c60af"} Dec 04 17:51:42 crc kubenswrapper[4631]: I1204 17:51:42.093283 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"83a88a9d-413f-40ce-bae5-624b4cfe00c9","Type":"ContainerStarted","Data":"c44725c719e73673deace22eeeef5ccc172d4950c95cb67c2672c96b53620409"} Dec 04 17:51:42 crc kubenswrapper[4631]: I1204 17:51:42.249972 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f017066e-42ab-4e68-891d-5df98da845a9" path="/var/lib/kubelet/pods/f017066e-42ab-4e68-891d-5df98da845a9/volumes" Dec 04 17:51:42 crc kubenswrapper[4631]: I1204 17:51:42.875489 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7b99dd8d64-9nrvl" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.143:8443: connect: connection refused" Dec 04 17:51:42 crc kubenswrapper[4631]: I1204 17:51:42.875871 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:51:42 crc kubenswrapper[4631]: I1204 17:51:42.876731 4631 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"7c15bbd3b5b3f4435f5df4d6fad19a863e4c5a1e382a6d66758122b5970fd161"} pod="openstack/horizon-7b99dd8d64-9nrvl" containerMessage="Container horizon failed startup probe, will be restarted" Dec 04 17:51:42 crc kubenswrapper[4631]: I1204 17:51:42.876782 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7b99dd8d64-9nrvl" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerName="horizon" containerID="cri-o://7c15bbd3b5b3f4435f5df4d6fad19a863e4c5a1e382a6d66758122b5970fd161" gracePeriod=30 Dec 04 17:51:43 crc kubenswrapper[4631]: I1204 17:51:43.002313 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-77d5fd455b-8kwkp" podUID="78aafb4d-470c-477d-bfe6-5b7a29b79fc0" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Dec 04 17:51:43 crc kubenswrapper[4631]: I1204 17:51:43.089222 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:43 crc kubenswrapper[4631]: I1204 17:51:43.091240 4631 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5dd47bd8d5-qcz5l" Dec 04 17:51:43 crc kubenswrapper[4631]: I1204 17:51:43.141443 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"83a88a9d-413f-40ce-bae5-624b4cfe00c9","Type":"ContainerStarted","Data":"b0c5632f0a7d3b6a040ba3a4a053183b76650402dc3b8a59fb5dd87a20c122ae"} Dec 04 17:51:43 crc kubenswrapper[4631]: I1204 17:51:43.141483 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 04 17:51:43 crc kubenswrapper[4631]: I1204 17:51:43.186116 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.770879141 podStartE2EDuration="2.186092411s" podCreationTimestamp="2025-12-04 17:51:41 +0000 UTC" firstStartedPulling="2025-12-04 17:51:42.031339189 +0000 UTC m=+1432.063581187" lastFinishedPulling="2025-12-04 17:51:42.446552459 +0000 UTC m=+1432.478794457" observedRunningTime="2025-12-04 17:51:43.18172969 +0000 UTC m=+1433.213971688" watchObservedRunningTime="2025-12-04 17:51:43.186092411 +0000 UTC m=+1433.218334419" Dec 04 17:51:44 crc kubenswrapper[4631]: I1204 17:51:44.150968 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5874bbe7-300c-47c7-8273-dd48c0572ff8" containerName="ceilometer-central-agent" containerID="cri-o://97fe0c5df319438c37a01746a9ae702a3fb3237773d4313f763c78780a0b7073" gracePeriod=30 Dec 04 17:51:44 crc kubenswrapper[4631]: I1204 17:51:44.151455 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5874bbe7-300c-47c7-8273-dd48c0572ff8","Type":"ContainerStarted","Data":"da48d0a51468aa9ee3e474d03cbf270ae00fc8cfcab58e49cda2cba9ea406e99"} Dec 04 17:51:44 crc kubenswrapper[4631]: I1204 17:51:44.152436 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 04 17:51:44 crc kubenswrapper[4631]: I1204 17:51:44.151771 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5874bbe7-300c-47c7-8273-dd48c0572ff8" containerName="ceilometer-notification-agent" containerID="cri-o://b2a4310b414e01a09133eb24ee895996e68b35e09da929bf646021704116489c" gracePeriod=30 Dec 04 17:51:44 crc kubenswrapper[4631]: I1204 17:51:44.151786 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5874bbe7-300c-47c7-8273-dd48c0572ff8" containerName="sg-core" containerID="cri-o://5d23a814fa7dc32e18bd6fa7518d1569d214ddd1d4b3cac555630948b67c60af" gracePeriod=30 Dec 04 17:51:44 crc kubenswrapper[4631]: I1204 17:51:44.151712 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5874bbe7-300c-47c7-8273-dd48c0572ff8" containerName="proxy-httpd" containerID="cri-o://da48d0a51468aa9ee3e474d03cbf270ae00fc8cfcab58e49cda2cba9ea406e99" gracePeriod=30 Dec 04 17:51:44 crc kubenswrapper[4631]: I1204 17:51:44.188008 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.897651566 podStartE2EDuration="10.18799283s" podCreationTimestamp="2025-12-04 17:51:34 +0000 UTC" firstStartedPulling="2025-12-04 17:51:35.95328161 +0000 UTC m=+1425.985523608" lastFinishedPulling="2025-12-04 17:51:43.243622874 +0000 UTC m=+1433.275864872" observedRunningTime="2025-12-04 
17:51:44.18365104 +0000 UTC m=+1434.215893038" watchObservedRunningTime="2025-12-04 17:51:44.18799283 +0000 UTC m=+1434.220234828" Dec 04 17:51:45 crc kubenswrapper[4631]: I1204 17:51:45.160793 4631 generic.go:334] "Generic (PLEG): container finished" podID="5874bbe7-300c-47c7-8273-dd48c0572ff8" containerID="5d23a814fa7dc32e18bd6fa7518d1569d214ddd1d4b3cac555630948b67c60af" exitCode=2 Dec 04 17:51:45 crc kubenswrapper[4631]: I1204 17:51:45.161095 4631 generic.go:334] "Generic (PLEG): container finished" podID="5874bbe7-300c-47c7-8273-dd48c0572ff8" containerID="b2a4310b414e01a09133eb24ee895996e68b35e09da929bf646021704116489c" exitCode=0 Dec 04 17:51:45 crc kubenswrapper[4631]: I1204 17:51:45.160873 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5874bbe7-300c-47c7-8273-dd48c0572ff8","Type":"ContainerDied","Data":"5d23a814fa7dc32e18bd6fa7518d1569d214ddd1d4b3cac555630948b67c60af"} Dec 04 17:51:45 crc kubenswrapper[4631]: I1204 17:51:45.161140 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5874bbe7-300c-47c7-8273-dd48c0572ff8","Type":"ContainerDied","Data":"b2a4310b414e01a09133eb24ee895996e68b35e09da929bf646021704116489c"} Dec 04 17:51:48 crc kubenswrapper[4631]: I1204 17:51:48.018664 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 04 17:51:48 crc kubenswrapper[4631]: I1204 17:51:48.846637 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-2hgnm"] Dec 04 17:51:48 crc kubenswrapper[4631]: I1204 17:51:48.850775 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-2hgnm" Dec 04 17:51:48 crc kubenswrapper[4631]: I1204 17:51:48.862687 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-2hgnm"] Dec 04 17:51:48 crc kubenswrapper[4631]: I1204 17:51:48.898835 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3cce586-6911-47cd-84ce-4bdef87f7bec-operator-scripts\") pod \"nova-api-db-create-2hgnm\" (UID: \"c3cce586-6911-47cd-84ce-4bdef87f7bec\") " pod="openstack/nova-api-db-create-2hgnm" Dec 04 17:51:48 crc kubenswrapper[4631]: I1204 17:51:48.898912 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lg6l9\" (UniqueName: \"kubernetes.io/projected/c3cce586-6911-47cd-84ce-4bdef87f7bec-kube-api-access-lg6l9\") pod \"nova-api-db-create-2hgnm\" (UID: \"c3cce586-6911-47cd-84ce-4bdef87f7bec\") " pod="openstack/nova-api-db-create-2hgnm" Dec 04 17:51:48 crc kubenswrapper[4631]: I1204 17:51:48.935660 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-5qnqc"] Dec 04 17:51:48 crc kubenswrapper[4631]: I1204 17:51:48.936753 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-5qnqc" Dec 04 17:51:48 crc kubenswrapper[4631]: I1204 17:51:48.974190 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-3f1a-account-create-update-bp6mt"] Dec 04 17:51:48 crc kubenswrapper[4631]: I1204 17:51:48.975302 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-3f1a-account-create-update-bp6mt" Dec 04 17:51:48 crc kubenswrapper[4631]: I1204 17:51:48.977910 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:48.999928 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lg6l9\" (UniqueName: \"kubernetes.io/projected/c3cce586-6911-47cd-84ce-4bdef87f7bec-kube-api-access-lg6l9\") pod \"nova-api-db-create-2hgnm\" (UID: \"c3cce586-6911-47cd-84ce-4bdef87f7bec\") " pod="openstack/nova-api-db-create-2hgnm" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.000352 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3cce586-6911-47cd-84ce-4bdef87f7bec-operator-scripts\") pod \"nova-api-db-create-2hgnm\" (UID: \"c3cce586-6911-47cd-84ce-4bdef87f7bec\") " pod="openstack/nova-api-db-create-2hgnm" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.008245 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-5qnqc"] Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.013920 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3cce586-6911-47cd-84ce-4bdef87f7bec-operator-scripts\") pod \"nova-api-db-create-2hgnm\" (UID: \"c3cce586-6911-47cd-84ce-4bdef87f7bec\") " pod="openstack/nova-api-db-create-2hgnm" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.046783 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lg6l9\" (UniqueName: \"kubernetes.io/projected/c3cce586-6911-47cd-84ce-4bdef87f7bec-kube-api-access-lg6l9\") pod \"nova-api-db-create-2hgnm\" (UID: \"c3cce586-6911-47cd-84ce-4bdef87f7bec\") " pod="openstack/nova-api-db-create-2hgnm" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.058747 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-3f1a-account-create-update-bp6mt"] Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.087541 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-pn9l7"] Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.088679 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-pn9l7" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.094866 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-pn9l7"] Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.116501 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7ghm\" (UniqueName: \"kubernetes.io/projected/2e9c2e3a-2169-4229-8d6c-63d4517c39fb-kube-api-access-s7ghm\") pod \"nova-cell0-db-create-5qnqc\" (UID: \"2e9c2e3a-2169-4229-8d6c-63d4517c39fb\") " pod="openstack/nova-cell0-db-create-5qnqc" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.116554 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2e9c2e3a-2169-4229-8d6c-63d4517c39fb-operator-scripts\") pod \"nova-cell0-db-create-5qnqc\" (UID: \"2e9c2e3a-2169-4229-8d6c-63d4517c39fb\") " pod="openstack/nova-cell0-db-create-5qnqc" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.116588 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f62e7c75-f842-4abc-b88f-2a69145acea0-operator-scripts\") pod \"nova-api-3f1a-account-create-update-bp6mt\" (UID: \"f62e7c75-f842-4abc-b88f-2a69145acea0\") " pod="openstack/nova-api-3f1a-account-create-update-bp6mt" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.116608 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bl6sn\" (UniqueName: \"kubernetes.io/projected/f62e7c75-f842-4abc-b88f-2a69145acea0-kube-api-access-bl6sn\") pod \"nova-api-3f1a-account-create-update-bp6mt\" (UID: \"f62e7c75-f842-4abc-b88f-2a69145acea0\") " pod="openstack/nova-api-3f1a-account-create-update-bp6mt" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.173860 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-284d-account-create-update-q8jnv"] Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.175133 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-284d-account-create-update-q8jnv" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.178038 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-2hgnm" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.179778 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.188901 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-284d-account-create-update-q8jnv"] Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.208917 4631 generic.go:334] "Generic (PLEG): container finished" podID="5874bbe7-300c-47c7-8273-dd48c0572ff8" containerID="97fe0c5df319438c37a01746a9ae702a3fb3237773d4313f763c78780a0b7073" exitCode=0 Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.208971 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5874bbe7-300c-47c7-8273-dd48c0572ff8","Type":"ContainerDied","Data":"97fe0c5df319438c37a01746a9ae702a3fb3237773d4313f763c78780a0b7073"} Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.209984 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"c65652e6-704f-4f88-9b9d-435868d33e0e","Type":"ContainerStarted","Data":"af057b101a7317aa1e07ea834b661173ddc38962bdd21c8ecd8209243d6a0623"} Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.226592 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7ghm\" (UniqueName: \"kubernetes.io/projected/2e9c2e3a-2169-4229-8d6c-63d4517c39fb-kube-api-access-s7ghm\") pod \"nova-cell0-db-create-5qnqc\" (UID: \"2e9c2e3a-2169-4229-8d6c-63d4517c39fb\") " pod="openstack/nova-cell0-db-create-5qnqc" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.226657 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43ae9593-0539-4f0d-8221-4f4bb2684ec0-operator-scripts\") pod \"nova-cell0-284d-account-create-update-q8jnv\" (UID: \"43ae9593-0539-4f0d-8221-4f4bb2684ec0\") " pod="openstack/nova-cell0-284d-account-create-update-q8jnv" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.227081 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ft8w\" (UniqueName: \"kubernetes.io/projected/6acc24ed-974b-438a-b145-cc7923b76914-kube-api-access-5ft8w\") pod \"nova-cell1-db-create-pn9l7\" (UID: \"6acc24ed-974b-438a-b145-cc7923b76914\") " pod="openstack/nova-cell1-db-create-pn9l7" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.227128 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2e9c2e3a-2169-4229-8d6c-63d4517c39fb-operator-scripts\") pod \"nova-cell0-db-create-5qnqc\" (UID: \"2e9c2e3a-2169-4229-8d6c-63d4517c39fb\") " pod="openstack/nova-cell0-db-create-5qnqc" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.227166 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f62e7c75-f842-4abc-b88f-2a69145acea0-operator-scripts\") pod \"nova-api-3f1a-account-create-update-bp6mt\" (UID: \"f62e7c75-f842-4abc-b88f-2a69145acea0\") " pod="openstack/nova-api-3f1a-account-create-update-bp6mt" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.227189 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bl6sn\" (UniqueName: 
\"kubernetes.io/projected/f62e7c75-f842-4abc-b88f-2a69145acea0-kube-api-access-bl6sn\") pod \"nova-api-3f1a-account-create-update-bp6mt\" (UID: \"f62e7c75-f842-4abc-b88f-2a69145acea0\") " pod="openstack/nova-api-3f1a-account-create-update-bp6mt" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.227232 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2sv4r\" (UniqueName: \"kubernetes.io/projected/43ae9593-0539-4f0d-8221-4f4bb2684ec0-kube-api-access-2sv4r\") pod \"nova-cell0-284d-account-create-update-q8jnv\" (UID: \"43ae9593-0539-4f0d-8221-4f4bb2684ec0\") " pod="openstack/nova-cell0-284d-account-create-update-q8jnv" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.227535 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6acc24ed-974b-438a-b145-cc7923b76914-operator-scripts\") pod \"nova-cell1-db-create-pn9l7\" (UID: \"6acc24ed-974b-438a-b145-cc7923b76914\") " pod="openstack/nova-cell1-db-create-pn9l7" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.230220 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f62e7c75-f842-4abc-b88f-2a69145acea0-operator-scripts\") pod \"nova-api-3f1a-account-create-update-bp6mt\" (UID: \"f62e7c75-f842-4abc-b88f-2a69145acea0\") " pod="openstack/nova-api-3f1a-account-create-update-bp6mt" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.230913 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2e9c2e3a-2169-4229-8d6c-63d4517c39fb-operator-scripts\") pod \"nova-cell0-db-create-5qnqc\" (UID: \"2e9c2e3a-2169-4229-8d6c-63d4517c39fb\") " pod="openstack/nova-cell0-db-create-5qnqc" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.251504 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.439944715 podStartE2EDuration="35.251489513s" podCreationTimestamp="2025-12-04 17:51:14 +0000 UTC" firstStartedPulling="2025-12-04 17:51:15.901563554 +0000 UTC m=+1405.933805552" lastFinishedPulling="2025-12-04 17:51:48.713108352 +0000 UTC m=+1438.745350350" observedRunningTime="2025-12-04 17:51:49.24738947 +0000 UTC m=+1439.279631478" watchObservedRunningTime="2025-12-04 17:51:49.251489513 +0000 UTC m=+1439.283731511" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.256516 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bl6sn\" (UniqueName: \"kubernetes.io/projected/f62e7c75-f842-4abc-b88f-2a69145acea0-kube-api-access-bl6sn\") pod \"nova-api-3f1a-account-create-update-bp6mt\" (UID: \"f62e7c75-f842-4abc-b88f-2a69145acea0\") " pod="openstack/nova-api-3f1a-account-create-update-bp6mt" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.256804 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7ghm\" (UniqueName: \"kubernetes.io/projected/2e9c2e3a-2169-4229-8d6c-63d4517c39fb-kube-api-access-s7ghm\") pod \"nova-cell0-db-create-5qnqc\" (UID: \"2e9c2e3a-2169-4229-8d6c-63d4517c39fb\") " pod="openstack/nova-cell0-db-create-5qnqc" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.274809 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-5qnqc" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.312537 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-3f1a-account-create-update-bp6mt" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.332565 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6acc24ed-974b-438a-b145-cc7923b76914-operator-scripts\") pod \"nova-cell1-db-create-pn9l7\" (UID: \"6acc24ed-974b-438a-b145-cc7923b76914\") " pod="openstack/nova-cell1-db-create-pn9l7" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.332638 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43ae9593-0539-4f0d-8221-4f4bb2684ec0-operator-scripts\") pod \"nova-cell0-284d-account-create-update-q8jnv\" (UID: \"43ae9593-0539-4f0d-8221-4f4bb2684ec0\") " pod="openstack/nova-cell0-284d-account-create-update-q8jnv" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.332665 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ft8w\" (UniqueName: \"kubernetes.io/projected/6acc24ed-974b-438a-b145-cc7923b76914-kube-api-access-5ft8w\") pod \"nova-cell1-db-create-pn9l7\" (UID: \"6acc24ed-974b-438a-b145-cc7923b76914\") " pod="openstack/nova-cell1-db-create-pn9l7" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.332696 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2sv4r\" (UniqueName: \"kubernetes.io/projected/43ae9593-0539-4f0d-8221-4f4bb2684ec0-kube-api-access-2sv4r\") pod \"nova-cell0-284d-account-create-update-q8jnv\" (UID: \"43ae9593-0539-4f0d-8221-4f4bb2684ec0\") " pod="openstack/nova-cell0-284d-account-create-update-q8jnv" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.334825 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6acc24ed-974b-438a-b145-cc7923b76914-operator-scripts\") pod \"nova-cell1-db-create-pn9l7\" (UID: \"6acc24ed-974b-438a-b145-cc7923b76914\") " pod="openstack/nova-cell1-db-create-pn9l7" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.335467 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43ae9593-0539-4f0d-8221-4f4bb2684ec0-operator-scripts\") pod \"nova-cell0-284d-account-create-update-q8jnv\" (UID: \"43ae9593-0539-4f0d-8221-4f4bb2684ec0\") " pod="openstack/nova-cell0-284d-account-create-update-q8jnv" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.349662 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-ab4b-account-create-update-lrvjf"] Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.351144 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-ab4b-account-create-update-lrvjf" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.362856 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.378868 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-ab4b-account-create-update-lrvjf"] Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.420353 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ft8w\" (UniqueName: \"kubernetes.io/projected/6acc24ed-974b-438a-b145-cc7923b76914-kube-api-access-5ft8w\") pod \"nova-cell1-db-create-pn9l7\" (UID: \"6acc24ed-974b-438a-b145-cc7923b76914\") " pod="openstack/nova-cell1-db-create-pn9l7" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.426554 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2sv4r\" (UniqueName: \"kubernetes.io/projected/43ae9593-0539-4f0d-8221-4f4bb2684ec0-kube-api-access-2sv4r\") pod \"nova-cell0-284d-account-create-update-q8jnv\" (UID: \"43ae9593-0539-4f0d-8221-4f4bb2684ec0\") " pod="openstack/nova-cell0-284d-account-create-update-q8jnv" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.429695 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-pn9l7" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.452798 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kg86t\" (UniqueName: \"kubernetes.io/projected/090be7c8-dbbd-4eb5-8621-4443f4f809d0-kube-api-access-kg86t\") pod \"nova-cell1-ab4b-account-create-update-lrvjf\" (UID: \"090be7c8-dbbd-4eb5-8621-4443f4f809d0\") " pod="openstack/nova-cell1-ab4b-account-create-update-lrvjf" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.452953 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/090be7c8-dbbd-4eb5-8621-4443f4f809d0-operator-scripts\") pod \"nova-cell1-ab4b-account-create-update-lrvjf\" (UID: \"090be7c8-dbbd-4eb5-8621-4443f4f809d0\") " pod="openstack/nova-cell1-ab4b-account-create-update-lrvjf" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.504166 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-284d-account-create-update-q8jnv" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.558513 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kg86t\" (UniqueName: \"kubernetes.io/projected/090be7c8-dbbd-4eb5-8621-4443f4f809d0-kube-api-access-kg86t\") pod \"nova-cell1-ab4b-account-create-update-lrvjf\" (UID: \"090be7c8-dbbd-4eb5-8621-4443f4f809d0\") " pod="openstack/nova-cell1-ab4b-account-create-update-lrvjf" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.558577 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/090be7c8-dbbd-4eb5-8621-4443f4f809d0-operator-scripts\") pod \"nova-cell1-ab4b-account-create-update-lrvjf\" (UID: \"090be7c8-dbbd-4eb5-8621-4443f4f809d0\") " pod="openstack/nova-cell1-ab4b-account-create-update-lrvjf" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.559430 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/090be7c8-dbbd-4eb5-8621-4443f4f809d0-operator-scripts\") pod \"nova-cell1-ab4b-account-create-update-lrvjf\" (UID: \"090be7c8-dbbd-4eb5-8621-4443f4f809d0\") " pod="openstack/nova-cell1-ab4b-account-create-update-lrvjf" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.598825 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kg86t\" (UniqueName: \"kubernetes.io/projected/090be7c8-dbbd-4eb5-8621-4443f4f809d0-kube-api-access-kg86t\") pod \"nova-cell1-ab4b-account-create-update-lrvjf\" (UID: \"090be7c8-dbbd-4eb5-8621-4443f4f809d0\") " pod="openstack/nova-cell1-ab4b-account-create-update-lrvjf" Dec 04 17:51:49 crc kubenswrapper[4631]: I1204 17:51:49.746407 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-ab4b-account-create-update-lrvjf" Dec 04 17:51:50 crc kubenswrapper[4631]: I1204 17:51:49.999810 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-2hgnm"] Dec 04 17:51:50 crc kubenswrapper[4631]: W1204 17:51:50.053539 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc3cce586_6911_47cd_84ce_4bdef87f7bec.slice/crio-2a9ea99510155d39b0be5cced84c4cfb3ee038b73c170166de905e1cc34fd21d WatchSource:0}: Error finding container 2a9ea99510155d39b0be5cced84c4cfb3ee038b73c170166de905e1cc34fd21d: Status 404 returned error can't find the container with id 2a9ea99510155d39b0be5cced84c4cfb3ee038b73c170166de905e1cc34fd21d Dec 04 17:51:50 crc kubenswrapper[4631]: I1204 17:51:50.312711 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-2hgnm" event={"ID":"c3cce586-6911-47cd-84ce-4bdef87f7bec","Type":"ContainerStarted","Data":"2a9ea99510155d39b0be5cced84c4cfb3ee038b73c170166de905e1cc34fd21d"} Dec 04 17:51:50 crc kubenswrapper[4631]: I1204 17:51:50.383555 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-5qnqc"] Dec 04 17:51:50 crc kubenswrapper[4631]: W1204 17:51:50.480475 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43ae9593_0539_4f0d_8221_4f4bb2684ec0.slice/crio-a5c3703c679ea6a21bf6e5dd974644346dc0ad2f40ffb595bb303cd8d30b548f WatchSource:0}: Error finding container a5c3703c679ea6a21bf6e5dd974644346dc0ad2f40ffb595bb303cd8d30b548f: Status 404 returned error can't find the container with id a5c3703c679ea6a21bf6e5dd974644346dc0ad2f40ffb595bb303cd8d30b548f Dec 04 17:51:50 crc kubenswrapper[4631]: I1204 17:51:50.483013 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-284d-account-create-update-q8jnv"] Dec 04 17:51:50 crc kubenswrapper[4631]: I1204 17:51:50.497911 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 04 17:51:50 crc kubenswrapper[4631]: I1204 17:51:50.675360 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-pn9l7"] Dec 04 17:51:50 crc kubenswrapper[4631]: I1204 17:51:50.717678 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-3f1a-account-create-update-bp6mt"] Dec 04 17:51:50 crc kubenswrapper[4631]: I1204 17:51:50.746842 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 04 17:51:50 crc kubenswrapper[4631]: I1204 17:51:50.809451 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-ab4b-account-create-update-lrvjf"] Dec 04 17:51:50 crc kubenswrapper[4631]: I1204 17:51:50.831280 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 04 17:51:51 crc kubenswrapper[4631]: I1204 17:51:51.275067 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-5qnqc" event={"ID":"2e9c2e3a-2169-4229-8d6c-63d4517c39fb","Type":"ContainerStarted","Data":"63706638dc4fff0a7dc40d5d179af363d500302902b079a10d0eb75106ffde20"} Dec 04 17:51:51 crc kubenswrapper[4631]: I1204 17:51:51.277253 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-284d-account-create-update-q8jnv" 
event={"ID":"43ae9593-0539-4f0d-8221-4f4bb2684ec0","Type":"ContainerStarted","Data":"a5c3703c679ea6a21bf6e5dd974644346dc0ad2f40ffb595bb303cd8d30b548f"} Dec 04 17:51:51 crc kubenswrapper[4631]: I1204 17:51:51.279733 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-3f1a-account-create-update-bp6mt" event={"ID":"f62e7c75-f842-4abc-b88f-2a69145acea0","Type":"ContainerStarted","Data":"0fdb89b798fa737bea6d5dced4b19ceb973eb5b3d0daa064791d8002e034f604"} Dec 04 17:51:51 crc kubenswrapper[4631]: I1204 17:51:51.282699 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-ab4b-account-create-update-lrvjf" event={"ID":"090be7c8-dbbd-4eb5-8621-4443f4f809d0","Type":"ContainerStarted","Data":"3835be4e22bfc10f4ef0cea7b8c5753ab0f951c542a8c40fe8c45b1e98182a9a"} Dec 04 17:51:51 crc kubenswrapper[4631]: I1204 17:51:51.284541 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-pn9l7" event={"ID":"6acc24ed-974b-438a-b145-cc7923b76914","Type":"ContainerStarted","Data":"fd6856a1466a51517807cf8f598ef1683868b43da03354be9119b01b018f57b4"} Dec 04 17:51:51 crc kubenswrapper[4631]: I1204 17:51:51.513209 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 04 17:51:52 crc kubenswrapper[4631]: I1204 17:51:52.294784 4631 generic.go:334] "Generic (PLEG): container finished" podID="090be7c8-dbbd-4eb5-8621-4443f4f809d0" containerID="f12dd277fd3ef68fc55962e03d460d3797eba63fd5e80c0122fbfc7b96b9f12b" exitCode=0 Dec 04 17:51:52 crc kubenswrapper[4631]: I1204 17:51:52.295096 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-ab4b-account-create-update-lrvjf" event={"ID":"090be7c8-dbbd-4eb5-8621-4443f4f809d0","Type":"ContainerDied","Data":"f12dd277fd3ef68fc55962e03d460d3797eba63fd5e80c0122fbfc7b96b9f12b"} Dec 04 17:51:52 crc kubenswrapper[4631]: I1204 17:51:52.298869 4631 generic.go:334] "Generic (PLEG): container finished" podID="6acc24ed-974b-438a-b145-cc7923b76914" containerID="dd2921bedf860eba92834a72932d307a218f70e5b8d79c3f82fcb22c4b8118d8" exitCode=0 Dec 04 17:51:52 crc kubenswrapper[4631]: I1204 17:51:52.298969 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-pn9l7" event={"ID":"6acc24ed-974b-438a-b145-cc7923b76914","Type":"ContainerDied","Data":"dd2921bedf860eba92834a72932d307a218f70e5b8d79c3f82fcb22c4b8118d8"} Dec 04 17:51:52 crc kubenswrapper[4631]: I1204 17:51:52.300741 4631 generic.go:334] "Generic (PLEG): container finished" podID="2e9c2e3a-2169-4229-8d6c-63d4517c39fb" containerID="c72412eb2687c1a80bdd9f260fbdbccbbaf38961fee3f92777bb55a593049856" exitCode=0 Dec 04 17:51:52 crc kubenswrapper[4631]: I1204 17:51:52.300793 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-5qnqc" event={"ID":"2e9c2e3a-2169-4229-8d6c-63d4517c39fb","Type":"ContainerDied","Data":"c72412eb2687c1a80bdd9f260fbdbccbbaf38961fee3f92777bb55a593049856"} Dec 04 17:51:52 crc kubenswrapper[4631]: I1204 17:51:52.302922 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-284d-account-create-update-q8jnv" event={"ID":"43ae9593-0539-4f0d-8221-4f4bb2684ec0","Type":"ContainerStarted","Data":"aee9972e5505514c6ecc9f92ae4ebb5759e423f6b0df49f5f0e12e6ecdf9ee45"} Dec 04 17:51:52 crc kubenswrapper[4631]: I1204 17:51:52.304865 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-3f1a-account-create-update-bp6mt" 
event={"ID":"f62e7c75-f842-4abc-b88f-2a69145acea0","Type":"ContainerStarted","Data":"e59ce63ded7ad1e0e4707b1e4602b31a146be1d8012a8c66491df68b7bf32de0"} Dec 04 17:51:52 crc kubenswrapper[4631]: I1204 17:51:52.308881 4631 generic.go:334] "Generic (PLEG): container finished" podID="c3cce586-6911-47cd-84ce-4bdef87f7bec" containerID="2a6c55ec3d8f196fc84bc5250bc2ac8f139c8df01349af9d443229a6187ced45" exitCode=0 Dec 04 17:51:52 crc kubenswrapper[4631]: I1204 17:51:52.308925 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-2hgnm" event={"ID":"c3cce586-6911-47cd-84ce-4bdef87f7bec","Type":"ContainerDied","Data":"2a6c55ec3d8f196fc84bc5250bc2ac8f139c8df01349af9d443229a6187ced45"} Dec 04 17:51:52 crc kubenswrapper[4631]: I1204 17:51:52.357547 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cinder-api-0" podUID="6b9d56f1-c2cf-471c-934b-15a0497af44b" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.169:8776/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 04 17:51:52 crc kubenswrapper[4631]: I1204 17:51:52.361333 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-284d-account-create-update-q8jnv" podStartSLOduration=3.361309073 podStartE2EDuration="3.361309073s" podCreationTimestamp="2025-12-04 17:51:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:51:52.349682491 +0000 UTC m=+1442.381924489" watchObservedRunningTime="2025-12-04 17:51:52.361309073 +0000 UTC m=+1442.393551071" Dec 04 17:51:52 crc kubenswrapper[4631]: I1204 17:51:52.416496 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-3f1a-account-create-update-bp6mt" podStartSLOduration=4.416476391 podStartE2EDuration="4.416476391s" podCreationTimestamp="2025-12-04 17:51:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:51:52.392038244 +0000 UTC m=+1442.424280262" watchObservedRunningTime="2025-12-04 17:51:52.416476391 +0000 UTC m=+1442.448718389" Dec 04 17:51:53 crc kubenswrapper[4631]: I1204 17:51:53.320319 4631 generic.go:334] "Generic (PLEG): container finished" podID="43ae9593-0539-4f0d-8221-4f4bb2684ec0" containerID="aee9972e5505514c6ecc9f92ae4ebb5759e423f6b0df49f5f0e12e6ecdf9ee45" exitCode=0 Dec 04 17:51:53 crc kubenswrapper[4631]: I1204 17:51:53.320483 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-284d-account-create-update-q8jnv" event={"ID":"43ae9593-0539-4f0d-8221-4f4bb2684ec0","Type":"ContainerDied","Data":"aee9972e5505514c6ecc9f92ae4ebb5759e423f6b0df49f5f0e12e6ecdf9ee45"} Dec 04 17:51:53 crc kubenswrapper[4631]: I1204 17:51:53.326075 4631 generic.go:334] "Generic (PLEG): container finished" podID="f62e7c75-f842-4abc-b88f-2a69145acea0" containerID="e59ce63ded7ad1e0e4707b1e4602b31a146be1d8012a8c66491df68b7bf32de0" exitCode=0 Dec 04 17:51:53 crc kubenswrapper[4631]: I1204 17:51:53.326285 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-3f1a-account-create-update-bp6mt" event={"ID":"f62e7c75-f842-4abc-b88f-2a69145acea0","Type":"ContainerDied","Data":"e59ce63ded7ad1e0e4707b1e4602b31a146be1d8012a8c66491df68b7bf32de0"} Dec 04 17:51:53 crc kubenswrapper[4631]: I1204 17:51:53.846903 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-pn9l7" Dec 04 17:51:53 crc kubenswrapper[4631]: I1204 17:51:53.974569 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6acc24ed-974b-438a-b145-cc7923b76914-operator-scripts\") pod \"6acc24ed-974b-438a-b145-cc7923b76914\" (UID: \"6acc24ed-974b-438a-b145-cc7923b76914\") " Dec 04 17:51:53 crc kubenswrapper[4631]: I1204 17:51:53.975975 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ft8w\" (UniqueName: \"kubernetes.io/projected/6acc24ed-974b-438a-b145-cc7923b76914-kube-api-access-5ft8w\") pod \"6acc24ed-974b-438a-b145-cc7923b76914\" (UID: \"6acc24ed-974b-438a-b145-cc7923b76914\") " Dec 04 17:51:53 crc kubenswrapper[4631]: I1204 17:51:53.975278 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6acc24ed-974b-438a-b145-cc7923b76914-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6acc24ed-974b-438a-b145-cc7923b76914" (UID: "6acc24ed-974b-438a-b145-cc7923b76914"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.006669 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6acc24ed-974b-438a-b145-cc7923b76914-kube-api-access-5ft8w" (OuterVolumeSpecName: "kube-api-access-5ft8w") pod "6acc24ed-974b-438a-b145-cc7923b76914" (UID: "6acc24ed-974b-438a-b145-cc7923b76914"). InnerVolumeSpecName "kube-api-access-5ft8w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.078259 4631 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6acc24ed-974b-438a-b145-cc7923b76914-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.078606 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ft8w\" (UniqueName: \"kubernetes.io/projected/6acc24ed-974b-438a-b145-cc7923b76914-kube-api-access-5ft8w\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.225067 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-ab4b-account-create-update-lrvjf" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.231725 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-5qnqc" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.238184 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-2hgnm" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.339490 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-2hgnm" event={"ID":"c3cce586-6911-47cd-84ce-4bdef87f7bec","Type":"ContainerDied","Data":"2a9ea99510155d39b0be5cced84c4cfb3ee038b73c170166de905e1cc34fd21d"} Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.339529 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2a9ea99510155d39b0be5cced84c4cfb3ee038b73c170166de905e1cc34fd21d" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.339581 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-2hgnm" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.341154 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-ab4b-account-create-update-lrvjf" event={"ID":"090be7c8-dbbd-4eb5-8621-4443f4f809d0","Type":"ContainerDied","Data":"3835be4e22bfc10f4ef0cea7b8c5753ab0f951c542a8c40fe8c45b1e98182a9a"} Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.341176 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3835be4e22bfc10f4ef0cea7b8c5753ab0f951c542a8c40fe8c45b1e98182a9a" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.341229 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-ab4b-account-create-update-lrvjf" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.342705 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-pn9l7" event={"ID":"6acc24ed-974b-438a-b145-cc7923b76914","Type":"ContainerDied","Data":"fd6856a1466a51517807cf8f598ef1683868b43da03354be9119b01b018f57b4"} Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.342727 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fd6856a1466a51517807cf8f598ef1683868b43da03354be9119b01b018f57b4" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.342734 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-pn9l7" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.350764 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-5qnqc" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.351095 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-5qnqc" event={"ID":"2e9c2e3a-2169-4229-8d6c-63d4517c39fb","Type":"ContainerDied","Data":"63706638dc4fff0a7dc40d5d179af363d500302902b079a10d0eb75106ffde20"} Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.351120 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="63706638dc4fff0a7dc40d5d179af363d500302902b079a10d0eb75106ffde20" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.386141 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3cce586-6911-47cd-84ce-4bdef87f7bec-operator-scripts\") pod \"c3cce586-6911-47cd-84ce-4bdef87f7bec\" (UID: \"c3cce586-6911-47cd-84ce-4bdef87f7bec\") " Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.386219 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/090be7c8-dbbd-4eb5-8621-4443f4f809d0-operator-scripts\") pod \"090be7c8-dbbd-4eb5-8621-4443f4f809d0\" (UID: \"090be7c8-dbbd-4eb5-8621-4443f4f809d0\") " Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.386280 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lg6l9\" (UniqueName: \"kubernetes.io/projected/c3cce586-6911-47cd-84ce-4bdef87f7bec-kube-api-access-lg6l9\") pod \"c3cce586-6911-47cd-84ce-4bdef87f7bec\" (UID: \"c3cce586-6911-47cd-84ce-4bdef87f7bec\") " Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.386319 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/2e9c2e3a-2169-4229-8d6c-63d4517c39fb-operator-scripts\") pod \"2e9c2e3a-2169-4229-8d6c-63d4517c39fb\" (UID: \"2e9c2e3a-2169-4229-8d6c-63d4517c39fb\") " Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.386417 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s7ghm\" (UniqueName: \"kubernetes.io/projected/2e9c2e3a-2169-4229-8d6c-63d4517c39fb-kube-api-access-s7ghm\") pod \"2e9c2e3a-2169-4229-8d6c-63d4517c39fb\" (UID: \"2e9c2e3a-2169-4229-8d6c-63d4517c39fb\") " Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.386486 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kg86t\" (UniqueName: \"kubernetes.io/projected/090be7c8-dbbd-4eb5-8621-4443f4f809d0-kube-api-access-kg86t\") pod \"090be7c8-dbbd-4eb5-8621-4443f4f809d0\" (UID: \"090be7c8-dbbd-4eb5-8621-4443f4f809d0\") " Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.388040 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e9c2e3a-2169-4229-8d6c-63d4517c39fb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2e9c2e3a-2169-4229-8d6c-63d4517c39fb" (UID: "2e9c2e3a-2169-4229-8d6c-63d4517c39fb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.388098 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/090be7c8-dbbd-4eb5-8621-4443f4f809d0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "090be7c8-dbbd-4eb5-8621-4443f4f809d0" (UID: "090be7c8-dbbd-4eb5-8621-4443f4f809d0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.388476 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3cce586-6911-47cd-84ce-4bdef87f7bec-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c3cce586-6911-47cd-84ce-4bdef87f7bec" (UID: "c3cce586-6911-47cd-84ce-4bdef87f7bec"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.391448 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/090be7c8-dbbd-4eb5-8621-4443f4f809d0-kube-api-access-kg86t" (OuterVolumeSpecName: "kube-api-access-kg86t") pod "090be7c8-dbbd-4eb5-8621-4443f4f809d0" (UID: "090be7c8-dbbd-4eb5-8621-4443f4f809d0"). InnerVolumeSpecName "kube-api-access-kg86t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.394497 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3cce586-6911-47cd-84ce-4bdef87f7bec-kube-api-access-lg6l9" (OuterVolumeSpecName: "kube-api-access-lg6l9") pod "c3cce586-6911-47cd-84ce-4bdef87f7bec" (UID: "c3cce586-6911-47cd-84ce-4bdef87f7bec"). InnerVolumeSpecName "kube-api-access-lg6l9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.394601 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e9c2e3a-2169-4229-8d6c-63d4517c39fb-kube-api-access-s7ghm" (OuterVolumeSpecName: "kube-api-access-s7ghm") pod "2e9c2e3a-2169-4229-8d6c-63d4517c39fb" (UID: "2e9c2e3a-2169-4229-8d6c-63d4517c39fb"). 
InnerVolumeSpecName "kube-api-access-s7ghm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.490602 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kg86t\" (UniqueName: \"kubernetes.io/projected/090be7c8-dbbd-4eb5-8621-4443f4f809d0-kube-api-access-kg86t\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.490645 4631 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3cce586-6911-47cd-84ce-4bdef87f7bec-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.490661 4631 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/090be7c8-dbbd-4eb5-8621-4443f4f809d0-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.490674 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lg6l9\" (UniqueName: \"kubernetes.io/projected/c3cce586-6911-47cd-84ce-4bdef87f7bec-kube-api-access-lg6l9\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.490687 4631 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2e9c2e3a-2169-4229-8d6c-63d4517c39fb-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.490700 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s7ghm\" (UniqueName: \"kubernetes.io/projected/2e9c2e3a-2169-4229-8d6c-63d4517c39fb-kube-api-access-s7ghm\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.566994 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-3f1a-account-create-update-bp6mt" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.706411 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f62e7c75-f842-4abc-b88f-2a69145acea0-operator-scripts\") pod \"f62e7c75-f842-4abc-b88f-2a69145acea0\" (UID: \"f62e7c75-f842-4abc-b88f-2a69145acea0\") " Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.706533 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bl6sn\" (UniqueName: \"kubernetes.io/projected/f62e7c75-f842-4abc-b88f-2a69145acea0-kube-api-access-bl6sn\") pod \"f62e7c75-f842-4abc-b88f-2a69145acea0\" (UID: \"f62e7c75-f842-4abc-b88f-2a69145acea0\") " Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.709783 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f62e7c75-f842-4abc-b88f-2a69145acea0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f62e7c75-f842-4abc-b88f-2a69145acea0" (UID: "f62e7c75-f842-4abc-b88f-2a69145acea0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.711578 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f62e7c75-f842-4abc-b88f-2a69145acea0-kube-api-access-bl6sn" (OuterVolumeSpecName: "kube-api-access-bl6sn") pod "f62e7c75-f842-4abc-b88f-2a69145acea0" (UID: "f62e7c75-f842-4abc-b88f-2a69145acea0"). InnerVolumeSpecName "kube-api-access-bl6sn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.724608 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-284d-account-create-update-q8jnv" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.809494 4631 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f62e7c75-f842-4abc-b88f-2a69145acea0-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.809525 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bl6sn\" (UniqueName: \"kubernetes.io/projected/f62e7c75-f842-4abc-b88f-2a69145acea0-kube-api-access-bl6sn\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.911096 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43ae9593-0539-4f0d-8221-4f4bb2684ec0-operator-scripts\") pod \"43ae9593-0539-4f0d-8221-4f4bb2684ec0\" (UID: \"43ae9593-0539-4f0d-8221-4f4bb2684ec0\") " Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.911661 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2sv4r\" (UniqueName: \"kubernetes.io/projected/43ae9593-0539-4f0d-8221-4f4bb2684ec0-kube-api-access-2sv4r\") pod \"43ae9593-0539-4f0d-8221-4f4bb2684ec0\" (UID: \"43ae9593-0539-4f0d-8221-4f4bb2684ec0\") " Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.911496 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43ae9593-0539-4f0d-8221-4f4bb2684ec0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "43ae9593-0539-4f0d-8221-4f4bb2684ec0" (UID: "43ae9593-0539-4f0d-8221-4f4bb2684ec0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.912023 4631 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43ae9593-0539-4f0d-8221-4f4bb2684ec0-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:54 crc kubenswrapper[4631]: I1204 17:51:54.914770 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43ae9593-0539-4f0d-8221-4f4bb2684ec0-kube-api-access-2sv4r" (OuterVolumeSpecName: "kube-api-access-2sv4r") pod "43ae9593-0539-4f0d-8221-4f4bb2684ec0" (UID: "43ae9593-0539-4f0d-8221-4f4bb2684ec0"). InnerVolumeSpecName "kube-api-access-2sv4r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:51:55 crc kubenswrapper[4631]: I1204 17:51:55.013511 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2sv4r\" (UniqueName: \"kubernetes.io/projected/43ae9593-0539-4f0d-8221-4f4bb2684ec0-kube-api-access-2sv4r\") on node \"crc\" DevicePath \"\"" Dec 04 17:51:55 crc kubenswrapper[4631]: I1204 17:51:55.362259 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-284d-account-create-update-q8jnv" event={"ID":"43ae9593-0539-4f0d-8221-4f4bb2684ec0","Type":"ContainerDied","Data":"a5c3703c679ea6a21bf6e5dd974644346dc0ad2f40ffb595bb303cd8d30b548f"} Dec 04 17:51:55 crc kubenswrapper[4631]: I1204 17:51:55.362303 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a5c3703c679ea6a21bf6e5dd974644346dc0ad2f40ffb595bb303cd8d30b548f" Dec 04 17:51:55 crc kubenswrapper[4631]: I1204 17:51:55.362302 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-284d-account-create-update-q8jnv" Dec 04 17:51:55 crc kubenswrapper[4631]: I1204 17:51:55.371594 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-3f1a-account-create-update-bp6mt" event={"ID":"f62e7c75-f842-4abc-b88f-2a69145acea0","Type":"ContainerDied","Data":"0fdb89b798fa737bea6d5dced4b19ceb973eb5b3d0daa064791d8002e034f604"} Dec 04 17:51:55 crc kubenswrapper[4631]: I1204 17:51:55.371640 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0fdb89b798fa737bea6d5dced4b19ceb973eb5b3d0daa064791d8002e034f604" Dec 04 17:51:55 crc kubenswrapper[4631]: I1204 17:51:55.371704 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-3f1a-account-create-update-bp6mt" Dec 04 17:51:56 crc kubenswrapper[4631]: I1204 17:51:56.386431 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:51:58 crc kubenswrapper[4631]: I1204 17:51:58.167847 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-77d5fd455b-8kwkp" Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.439051 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7b99dd8d64-9nrvl"] Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.730482 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vkqmx"] Dec 04 17:51:59 crc kubenswrapper[4631]: E1204 17:51:59.730909 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3cce586-6911-47cd-84ce-4bdef87f7bec" containerName="mariadb-database-create" Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.730929 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3cce586-6911-47cd-84ce-4bdef87f7bec" containerName="mariadb-database-create" Dec 04 17:51:59 crc kubenswrapper[4631]: E1204 17:51:59.730947 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43ae9593-0539-4f0d-8221-4f4bb2684ec0" containerName="mariadb-account-create-update" Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.730955 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="43ae9593-0539-4f0d-8221-4f4bb2684ec0" containerName="mariadb-account-create-update" Dec 04 17:51:59 crc kubenswrapper[4631]: E1204 17:51:59.730981 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="090be7c8-dbbd-4eb5-8621-4443f4f809d0" 
containerName="mariadb-account-create-update" Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.730988 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="090be7c8-dbbd-4eb5-8621-4443f4f809d0" containerName="mariadb-account-create-update" Dec 04 17:51:59 crc kubenswrapper[4631]: E1204 17:51:59.731007 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e9c2e3a-2169-4229-8d6c-63d4517c39fb" containerName="mariadb-database-create" Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.731015 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e9c2e3a-2169-4229-8d6c-63d4517c39fb" containerName="mariadb-database-create" Dec 04 17:51:59 crc kubenswrapper[4631]: E1204 17:51:59.731034 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f62e7c75-f842-4abc-b88f-2a69145acea0" containerName="mariadb-account-create-update" Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.731042 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="f62e7c75-f842-4abc-b88f-2a69145acea0" containerName="mariadb-account-create-update" Dec 04 17:51:59 crc kubenswrapper[4631]: E1204 17:51:59.731057 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6acc24ed-974b-438a-b145-cc7923b76914" containerName="mariadb-database-create" Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.731064 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="6acc24ed-974b-438a-b145-cc7923b76914" containerName="mariadb-database-create" Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.731255 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3cce586-6911-47cd-84ce-4bdef87f7bec" containerName="mariadb-database-create" Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.731272 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e9c2e3a-2169-4229-8d6c-63d4517c39fb" containerName="mariadb-database-create" Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.731282 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="f62e7c75-f842-4abc-b88f-2a69145acea0" containerName="mariadb-account-create-update" Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.731301 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="090be7c8-dbbd-4eb5-8621-4443f4f809d0" containerName="mariadb-account-create-update" Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.731322 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="6acc24ed-974b-438a-b145-cc7923b76914" containerName="mariadb-database-create" Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.731338 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="43ae9593-0539-4f0d-8221-4f4bb2684ec0" containerName="mariadb-account-create-update" Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.732191 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vkqmx" Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.735569 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.735708 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-99lk4" Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.736509 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.770019 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vkqmx"] Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.906103 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3de6752-29b4-433b-8dcc-3237237aec3b-scripts\") pod \"nova-cell0-conductor-db-sync-vkqmx\" (UID: \"d3de6752-29b4-433b-8dcc-3237237aec3b\") " pod="openstack/nova-cell0-conductor-db-sync-vkqmx" Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.906184 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3de6752-29b4-433b-8dcc-3237237aec3b-config-data\") pod \"nova-cell0-conductor-db-sync-vkqmx\" (UID: \"d3de6752-29b4-433b-8dcc-3237237aec3b\") " pod="openstack/nova-cell0-conductor-db-sync-vkqmx" Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.906321 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3de6752-29b4-433b-8dcc-3237237aec3b-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-vkqmx\" (UID: \"d3de6752-29b4-433b-8dcc-3237237aec3b\") " pod="openstack/nova-cell0-conductor-db-sync-vkqmx" Dec 04 17:51:59 crc kubenswrapper[4631]: I1204 17:51:59.906384 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-567k4\" (UniqueName: \"kubernetes.io/projected/d3de6752-29b4-433b-8dcc-3237237aec3b-kube-api-access-567k4\") pod \"nova-cell0-conductor-db-sync-vkqmx\" (UID: \"d3de6752-29b4-433b-8dcc-3237237aec3b\") " pod="openstack/nova-cell0-conductor-db-sync-vkqmx" Dec 04 17:52:00 crc kubenswrapper[4631]: I1204 17:52:00.007714 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3de6752-29b4-433b-8dcc-3237237aec3b-config-data\") pod \"nova-cell0-conductor-db-sync-vkqmx\" (UID: \"d3de6752-29b4-433b-8dcc-3237237aec3b\") " pod="openstack/nova-cell0-conductor-db-sync-vkqmx" Dec 04 17:52:00 crc kubenswrapper[4631]: I1204 17:52:00.007815 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3de6752-29b4-433b-8dcc-3237237aec3b-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-vkqmx\" (UID: \"d3de6752-29b4-433b-8dcc-3237237aec3b\") " pod="openstack/nova-cell0-conductor-db-sync-vkqmx" Dec 04 17:52:00 crc kubenswrapper[4631]: I1204 17:52:00.007850 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-567k4\" (UniqueName: \"kubernetes.io/projected/d3de6752-29b4-433b-8dcc-3237237aec3b-kube-api-access-567k4\") pod \"nova-cell0-conductor-db-sync-vkqmx\" 
(UID: \"d3de6752-29b4-433b-8dcc-3237237aec3b\") " pod="openstack/nova-cell0-conductor-db-sync-vkqmx" Dec 04 17:52:00 crc kubenswrapper[4631]: I1204 17:52:00.007915 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3de6752-29b4-433b-8dcc-3237237aec3b-scripts\") pod \"nova-cell0-conductor-db-sync-vkqmx\" (UID: \"d3de6752-29b4-433b-8dcc-3237237aec3b\") " pod="openstack/nova-cell0-conductor-db-sync-vkqmx" Dec 04 17:52:00 crc kubenswrapper[4631]: I1204 17:52:00.013510 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3de6752-29b4-433b-8dcc-3237237aec3b-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-vkqmx\" (UID: \"d3de6752-29b4-433b-8dcc-3237237aec3b\") " pod="openstack/nova-cell0-conductor-db-sync-vkqmx" Dec 04 17:52:00 crc kubenswrapper[4631]: I1204 17:52:00.014735 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3de6752-29b4-433b-8dcc-3237237aec3b-scripts\") pod \"nova-cell0-conductor-db-sync-vkqmx\" (UID: \"d3de6752-29b4-433b-8dcc-3237237aec3b\") " pod="openstack/nova-cell0-conductor-db-sync-vkqmx" Dec 04 17:52:00 crc kubenswrapper[4631]: I1204 17:52:00.016167 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3de6752-29b4-433b-8dcc-3237237aec3b-config-data\") pod \"nova-cell0-conductor-db-sync-vkqmx\" (UID: \"d3de6752-29b4-433b-8dcc-3237237aec3b\") " pod="openstack/nova-cell0-conductor-db-sync-vkqmx" Dec 04 17:52:00 crc kubenswrapper[4631]: I1204 17:52:00.025648 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-567k4\" (UniqueName: \"kubernetes.io/projected/d3de6752-29b4-433b-8dcc-3237237aec3b-kube-api-access-567k4\") pod \"nova-cell0-conductor-db-sync-vkqmx\" (UID: \"d3de6752-29b4-433b-8dcc-3237237aec3b\") " pod="openstack/nova-cell0-conductor-db-sync-vkqmx" Dec 04 17:52:00 crc kubenswrapper[4631]: I1204 17:52:00.050492 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vkqmx" Dec 04 17:52:00 crc kubenswrapper[4631]: I1204 17:52:00.550907 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vkqmx"] Dec 04 17:52:01 crc kubenswrapper[4631]: I1204 17:52:01.427075 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vkqmx" event={"ID":"d3de6752-29b4-433b-8dcc-3237237aec3b","Type":"ContainerStarted","Data":"9491acd1151432febf8c28e32a9df117d8340b3e094766c206fcff2d70a81dec"} Dec 04 17:52:05 crc kubenswrapper[4631]: I1204 17:52:05.396060 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="5874bbe7-300c-47c7-8273-dd48c0572ff8" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 04 17:52:13 crc kubenswrapper[4631]: I1204 17:52:13.553268 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vkqmx" event={"ID":"d3de6752-29b4-433b-8dcc-3237237aec3b","Type":"ContainerStarted","Data":"3b71d7ebbfd54a77a51efd85d5e362b36f3a7f7200c00dc8c9e6999b98e07e0f"} Dec 04 17:52:13 crc kubenswrapper[4631]: I1204 17:52:13.556307 4631 generic.go:334] "Generic (PLEG): container finished" podID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerID="7c15bbd3b5b3f4435f5df4d6fad19a863e4c5a1e382a6d66758122b5970fd161" exitCode=137 Dec 04 17:52:13 crc kubenswrapper[4631]: I1204 17:52:13.556345 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b99dd8d64-9nrvl" event={"ID":"a675d52a-03e9-46e8-8b51-4e7f378179cf","Type":"ContainerDied","Data":"7c15bbd3b5b3f4435f5df4d6fad19a863e4c5a1e382a6d66758122b5970fd161"} Dec 04 17:52:13 crc kubenswrapper[4631]: I1204 17:52:13.556366 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b99dd8d64-9nrvl" event={"ID":"a675d52a-03e9-46e8-8b51-4e7f378179cf","Type":"ContainerStarted","Data":"ea0539fbc919798993abcd7146bb0239a2e44ab54442e650d0342a83e2706f53"} Dec 04 17:52:13 crc kubenswrapper[4631]: I1204 17:52:13.556406 4631 scope.go:117] "RemoveContainer" containerID="c0232110e98619eba87d73ad4f17b16ad869ae5f8c81c4f0cb9721c3b739dcde" Dec 04 17:52:13 crc kubenswrapper[4631]: I1204 17:52:13.556571 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7b99dd8d64-9nrvl" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerName="horizon-log" containerID="cri-o://dd83d7f3d8572b684138f5f3d8a13e79de4098e2db853d6a15e483ce33e834ab" gracePeriod=30 Dec 04 17:52:13 crc kubenswrapper[4631]: I1204 17:52:13.556644 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7b99dd8d64-9nrvl" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerName="horizon" containerID="cri-o://ea0539fbc919798993abcd7146bb0239a2e44ab54442e650d0342a83e2706f53" gracePeriod=30 Dec 04 17:52:13 crc kubenswrapper[4631]: I1204 17:52:13.580322 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-vkqmx" podStartSLOduration=2.102098279 podStartE2EDuration="14.580304217s" podCreationTimestamp="2025-12-04 17:51:59 +0000 UTC" firstStartedPulling="2025-12-04 17:52:00.554219884 +0000 UTC m=+1450.586461882" lastFinishedPulling="2025-12-04 17:52:13.032425822 +0000 UTC m=+1463.064667820" observedRunningTime="2025-12-04 17:52:13.576212633 +0000 UTC m=+1463.608454631" watchObservedRunningTime="2025-12-04 17:52:13.580304217 +0000 
UTC m=+1463.612546215" Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.582669 4631 generic.go:334] "Generic (PLEG): container finished" podID="5874bbe7-300c-47c7-8273-dd48c0572ff8" containerID="da48d0a51468aa9ee3e474d03cbf270ae00fc8cfcab58e49cda2cba9ea406e99" exitCode=137 Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.583510 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5874bbe7-300c-47c7-8273-dd48c0572ff8","Type":"ContainerDied","Data":"da48d0a51468aa9ee3e474d03cbf270ae00fc8cfcab58e49cda2cba9ea406e99"} Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.671286 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.792043 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-sg-core-conf-yaml\") pod \"5874bbe7-300c-47c7-8273-dd48c0572ff8\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.792175 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5874bbe7-300c-47c7-8273-dd48c0572ff8-log-httpd\") pod \"5874bbe7-300c-47c7-8273-dd48c0572ff8\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.792231 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-config-data\") pod \"5874bbe7-300c-47c7-8273-dd48c0572ff8\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.792253 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-combined-ca-bundle\") pod \"5874bbe7-300c-47c7-8273-dd48c0572ff8\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.792277 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5874bbe7-300c-47c7-8273-dd48c0572ff8-run-httpd\") pod \"5874bbe7-300c-47c7-8273-dd48c0572ff8\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.792360 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-scripts\") pod \"5874bbe7-300c-47c7-8273-dd48c0572ff8\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.792416 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fj4gk\" (UniqueName: \"kubernetes.io/projected/5874bbe7-300c-47c7-8273-dd48c0572ff8-kube-api-access-fj4gk\") pod \"5874bbe7-300c-47c7-8273-dd48c0572ff8\" (UID: \"5874bbe7-300c-47c7-8273-dd48c0572ff8\") " Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.801820 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5874bbe7-300c-47c7-8273-dd48c0572ff8-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5874bbe7-300c-47c7-8273-dd48c0572ff8" (UID: "5874bbe7-300c-47c7-8273-dd48c0572ff8"). 
InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.806199 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5874bbe7-300c-47c7-8273-dd48c0572ff8-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5874bbe7-300c-47c7-8273-dd48c0572ff8" (UID: "5874bbe7-300c-47c7-8273-dd48c0572ff8"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.824546 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5874bbe7-300c-47c7-8273-dd48c0572ff8-kube-api-access-fj4gk" (OuterVolumeSpecName: "kube-api-access-fj4gk") pod "5874bbe7-300c-47c7-8273-dd48c0572ff8" (UID: "5874bbe7-300c-47c7-8273-dd48c0572ff8"). InnerVolumeSpecName "kube-api-access-fj4gk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.824627 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-scripts" (OuterVolumeSpecName: "scripts") pod "5874bbe7-300c-47c7-8273-dd48c0572ff8" (UID: "5874bbe7-300c-47c7-8273-dd48c0572ff8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.837061 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5874bbe7-300c-47c7-8273-dd48c0572ff8" (UID: "5874bbe7-300c-47c7-8273-dd48c0572ff8"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.895196 4631 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5874bbe7-300c-47c7-8273-dd48c0572ff8-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.895284 4631 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5874bbe7-300c-47c7-8273-dd48c0572ff8-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.895348 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.895363 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fj4gk\" (UniqueName: \"kubernetes.io/projected/5874bbe7-300c-47c7-8273-dd48c0572ff8-kube-api-access-fj4gk\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.895393 4631 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.907776 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5874bbe7-300c-47c7-8273-dd48c0572ff8" (UID: "5874bbe7-300c-47c7-8273-dd48c0572ff8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.915175 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-config-data" (OuterVolumeSpecName: "config-data") pod "5874bbe7-300c-47c7-8273-dd48c0572ff8" (UID: "5874bbe7-300c-47c7-8273-dd48c0572ff8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.996711 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:14 crc kubenswrapper[4631]: I1204 17:52:14.996737 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5874bbe7-300c-47c7-8273-dd48c0572ff8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.593258 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5874bbe7-300c-47c7-8273-dd48c0572ff8","Type":"ContainerDied","Data":"308fa095474135faa7ad737dccc1e0bc3e03405f95497687e68ae8d2763967e7"} Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.593311 4631 scope.go:117] "RemoveContainer" containerID="da48d0a51468aa9ee3e474d03cbf270ae00fc8cfcab58e49cda2cba9ea406e99" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.593328 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.614333 4631 scope.go:117] "RemoveContainer" containerID="5d23a814fa7dc32e18bd6fa7518d1569d214ddd1d4b3cac555630948b67c60af" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.646850 4631 scope.go:117] "RemoveContainer" containerID="b2a4310b414e01a09133eb24ee895996e68b35e09da929bf646021704116489c" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.647389 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.662644 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.677476 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:52:15 crc kubenswrapper[4631]: E1204 17:52:15.677864 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5874bbe7-300c-47c7-8273-dd48c0572ff8" containerName="sg-core" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.677883 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="5874bbe7-300c-47c7-8273-dd48c0572ff8" containerName="sg-core" Dec 04 17:52:15 crc kubenswrapper[4631]: E1204 17:52:15.677906 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5874bbe7-300c-47c7-8273-dd48c0572ff8" containerName="ceilometer-notification-agent" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.677914 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="5874bbe7-300c-47c7-8273-dd48c0572ff8" containerName="ceilometer-notification-agent" Dec 04 17:52:15 crc kubenswrapper[4631]: E1204 17:52:15.677926 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5874bbe7-300c-47c7-8273-dd48c0572ff8" containerName="ceilometer-central-agent" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.677932 4631 
state_mem.go:107] "Deleted CPUSet assignment" podUID="5874bbe7-300c-47c7-8273-dd48c0572ff8" containerName="ceilometer-central-agent" Dec 04 17:52:15 crc kubenswrapper[4631]: E1204 17:52:15.677938 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5874bbe7-300c-47c7-8273-dd48c0572ff8" containerName="proxy-httpd" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.677944 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="5874bbe7-300c-47c7-8273-dd48c0572ff8" containerName="proxy-httpd" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.678116 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="5874bbe7-300c-47c7-8273-dd48c0572ff8" containerName="sg-core" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.678129 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="5874bbe7-300c-47c7-8273-dd48c0572ff8" containerName="ceilometer-notification-agent" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.678139 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="5874bbe7-300c-47c7-8273-dd48c0572ff8" containerName="proxy-httpd" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.678149 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="5874bbe7-300c-47c7-8273-dd48c0572ff8" containerName="ceilometer-central-agent" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.681247 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.689301 4631 scope.go:117] "RemoveContainer" containerID="97fe0c5df319438c37a01746a9ae702a3fb3237773d4313f763c78780a0b7073" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.712177 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.712452 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.715420 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.718091 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.811477 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.811537 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ht6c8\" (UniqueName: \"kubernetes.io/projected/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-kube-api-access-ht6c8\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.811606 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 
17:52:15.811659 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-run-httpd\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.811688 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-config-data\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.811724 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-log-httpd\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.811743 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-scripts\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.811928 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.913292 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-scripts\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.913359 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.913423 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.913456 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ht6c8\" (UniqueName: \"kubernetes.io/projected/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-kube-api-access-ht6c8\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.913535 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: 
\"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.913570 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-run-httpd\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.913602 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-config-data\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.913647 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-log-httpd\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.914059 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-log-httpd\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.914351 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-run-httpd\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.932273 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-config-data\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.934586 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-scripts\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.935549 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.935598 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc kubenswrapper[4631]: I1204 17:52:15.936102 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:15 crc 
kubenswrapper[4631]: I1204 17:52:15.937902 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ht6c8\" (UniqueName: \"kubernetes.io/projected/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-kube-api-access-ht6c8\") pod \"ceilometer-0\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " pod="openstack/ceilometer-0" Dec 04 17:52:16 crc kubenswrapper[4631]: I1204 17:52:16.037648 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 17:52:16 crc kubenswrapper[4631]: I1204 17:52:16.259568 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5874bbe7-300c-47c7-8273-dd48c0572ff8" path="/var/lib/kubelet/pods/5874bbe7-300c-47c7-8273-dd48c0572ff8/volumes" Dec 04 17:52:16 crc kubenswrapper[4631]: I1204 17:52:16.664742 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:52:17 crc kubenswrapper[4631]: I1204 17:52:17.615827 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8","Type":"ContainerStarted","Data":"9a2b06f413781865c10dfd1ec9e6cd915c400f84118b99ba9554b2aace835f9d"} Dec 04 17:52:17 crc kubenswrapper[4631]: I1204 17:52:17.616160 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8","Type":"ContainerStarted","Data":"8805e79ea261b75114b0326ea6d2e23f321d94d0035d4a04ee7f9bad43a06e95"} Dec 04 17:52:18 crc kubenswrapper[4631]: I1204 17:52:18.633325 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8","Type":"ContainerStarted","Data":"470b92e6fd056880808bba178ace4380fa4e6cebc298a51ec6ab32ec94937d7b"} Dec 04 17:52:19 crc kubenswrapper[4631]: I1204 17:52:19.645530 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8","Type":"ContainerStarted","Data":"1a87ed04e5042c2a89223b7190dbb9b838165f7c8d52d7dd3ae16f334400862b"} Dec 04 17:52:21 crc kubenswrapper[4631]: I1204 17:52:21.662014 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8","Type":"ContainerStarted","Data":"a2352c06307bcbad4567dea0ba5b36e096568d3ecf5b02c316e5a09300d50576"} Dec 04 17:52:21 crc kubenswrapper[4631]: I1204 17:52:21.663609 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 04 17:52:21 crc kubenswrapper[4631]: I1204 17:52:21.686948 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.63405368 podStartE2EDuration="6.686927439s" podCreationTimestamp="2025-12-04 17:52:15 +0000 UTC" firstStartedPulling="2025-12-04 17:52:16.672075227 +0000 UTC m=+1466.704317225" lastFinishedPulling="2025-12-04 17:52:20.724948996 +0000 UTC m=+1470.757190984" observedRunningTime="2025-12-04 17:52:21.683474044 +0000 UTC m=+1471.715716052" watchObservedRunningTime="2025-12-04 17:52:21.686927439 +0000 UTC m=+1471.719169437" Dec 04 17:52:22 crc kubenswrapper[4631]: I1204 17:52:22.875356 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:52:28 crc kubenswrapper[4631]: I1204 17:52:28.725121 4631 generic.go:334] "Generic (PLEG): container finished" podID="d3de6752-29b4-433b-8dcc-3237237aec3b" 
containerID="3b71d7ebbfd54a77a51efd85d5e362b36f3a7f7200c00dc8c9e6999b98e07e0f" exitCode=0 Dec 04 17:52:28 crc kubenswrapper[4631]: I1204 17:52:28.725741 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vkqmx" event={"ID":"d3de6752-29b4-433b-8dcc-3237237aec3b","Type":"ContainerDied","Data":"3b71d7ebbfd54a77a51efd85d5e362b36f3a7f7200c00dc8c9e6999b98e07e0f"} Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.140663 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vkqmx" Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.316211 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3de6752-29b4-433b-8dcc-3237237aec3b-scripts\") pod \"d3de6752-29b4-433b-8dcc-3237237aec3b\" (UID: \"d3de6752-29b4-433b-8dcc-3237237aec3b\") " Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.316279 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3de6752-29b4-433b-8dcc-3237237aec3b-config-data\") pod \"d3de6752-29b4-433b-8dcc-3237237aec3b\" (UID: \"d3de6752-29b4-433b-8dcc-3237237aec3b\") " Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.316357 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3de6752-29b4-433b-8dcc-3237237aec3b-combined-ca-bundle\") pod \"d3de6752-29b4-433b-8dcc-3237237aec3b\" (UID: \"d3de6752-29b4-433b-8dcc-3237237aec3b\") " Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.317252 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-567k4\" (UniqueName: \"kubernetes.io/projected/d3de6752-29b4-433b-8dcc-3237237aec3b-kube-api-access-567k4\") pod \"d3de6752-29b4-433b-8dcc-3237237aec3b\" (UID: \"d3de6752-29b4-433b-8dcc-3237237aec3b\") " Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.325114 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3de6752-29b4-433b-8dcc-3237237aec3b-kube-api-access-567k4" (OuterVolumeSpecName: "kube-api-access-567k4") pod "d3de6752-29b4-433b-8dcc-3237237aec3b" (UID: "d3de6752-29b4-433b-8dcc-3237237aec3b"). InnerVolumeSpecName "kube-api-access-567k4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.328576 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3de6752-29b4-433b-8dcc-3237237aec3b-scripts" (OuterVolumeSpecName: "scripts") pod "d3de6752-29b4-433b-8dcc-3237237aec3b" (UID: "d3de6752-29b4-433b-8dcc-3237237aec3b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.356251 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3de6752-29b4-433b-8dcc-3237237aec3b-config-data" (OuterVolumeSpecName: "config-data") pod "d3de6752-29b4-433b-8dcc-3237237aec3b" (UID: "d3de6752-29b4-433b-8dcc-3237237aec3b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.367098 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3de6752-29b4-433b-8dcc-3237237aec3b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d3de6752-29b4-433b-8dcc-3237237aec3b" (UID: "d3de6752-29b4-433b-8dcc-3237237aec3b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.419763 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3de6752-29b4-433b-8dcc-3237237aec3b-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.419821 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3de6752-29b4-433b-8dcc-3237237aec3b-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.419834 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3de6752-29b4-433b-8dcc-3237237aec3b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.419849 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-567k4\" (UniqueName: \"kubernetes.io/projected/d3de6752-29b4-433b-8dcc-3237237aec3b-kube-api-access-567k4\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.745550 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vkqmx" event={"ID":"d3de6752-29b4-433b-8dcc-3237237aec3b","Type":"ContainerDied","Data":"9491acd1151432febf8c28e32a9df117d8340b3e094766c206fcff2d70a81dec"} Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.745589 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9491acd1151432febf8c28e32a9df117d8340b3e094766c206fcff2d70a81dec" Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.745643 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vkqmx" Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.882544 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 04 17:52:30 crc kubenswrapper[4631]: E1204 17:52:30.883051 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3de6752-29b4-433b-8dcc-3237237aec3b" containerName="nova-cell0-conductor-db-sync" Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.883070 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3de6752-29b4-433b-8dcc-3237237aec3b" containerName="nova-cell0-conductor-db-sync" Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.883324 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3de6752-29b4-433b-8dcc-3237237aec3b" containerName="nova-cell0-conductor-db-sync" Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.884125 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.887542 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.897007 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-99lk4" Dec 04 17:52:30 crc kubenswrapper[4631]: I1204 17:52:30.897427 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 04 17:52:31 crc kubenswrapper[4631]: I1204 17:52:31.028205 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d165be1-162b-40cb-96fa-e66a846f3966-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"3d165be1-162b-40cb-96fa-e66a846f3966\") " pod="openstack/nova-cell0-conductor-0" Dec 04 17:52:31 crc kubenswrapper[4631]: I1204 17:52:31.028388 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fn5bf\" (UniqueName: \"kubernetes.io/projected/3d165be1-162b-40cb-96fa-e66a846f3966-kube-api-access-fn5bf\") pod \"nova-cell0-conductor-0\" (UID: \"3d165be1-162b-40cb-96fa-e66a846f3966\") " pod="openstack/nova-cell0-conductor-0" Dec 04 17:52:31 crc kubenswrapper[4631]: I1204 17:52:31.028481 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d165be1-162b-40cb-96fa-e66a846f3966-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"3d165be1-162b-40cb-96fa-e66a846f3966\") " pod="openstack/nova-cell0-conductor-0" Dec 04 17:52:31 crc kubenswrapper[4631]: I1204 17:52:31.130348 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fn5bf\" (UniqueName: \"kubernetes.io/projected/3d165be1-162b-40cb-96fa-e66a846f3966-kube-api-access-fn5bf\") pod \"nova-cell0-conductor-0\" (UID: \"3d165be1-162b-40cb-96fa-e66a846f3966\") " pod="openstack/nova-cell0-conductor-0" Dec 04 17:52:31 crc kubenswrapper[4631]: I1204 17:52:31.130466 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d165be1-162b-40cb-96fa-e66a846f3966-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"3d165be1-162b-40cb-96fa-e66a846f3966\") " pod="openstack/nova-cell0-conductor-0" Dec 04 17:52:31 crc kubenswrapper[4631]: I1204 17:52:31.130521 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d165be1-162b-40cb-96fa-e66a846f3966-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"3d165be1-162b-40cb-96fa-e66a846f3966\") " pod="openstack/nova-cell0-conductor-0" Dec 04 17:52:31 crc kubenswrapper[4631]: I1204 17:52:31.137033 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d165be1-162b-40cb-96fa-e66a846f3966-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"3d165be1-162b-40cb-96fa-e66a846f3966\") " pod="openstack/nova-cell0-conductor-0" Dec 04 17:52:31 crc kubenswrapper[4631]: I1204 17:52:31.145798 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d165be1-162b-40cb-96fa-e66a846f3966-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"3d165be1-162b-40cb-96fa-e66a846f3966\") " pod="openstack/nova-cell0-conductor-0" Dec 04 17:52:31 crc kubenswrapper[4631]: I1204 17:52:31.154164 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fn5bf\" (UniqueName: \"kubernetes.io/projected/3d165be1-162b-40cb-96fa-e66a846f3966-kube-api-access-fn5bf\") pod \"nova-cell0-conductor-0\" (UID: \"3d165be1-162b-40cb-96fa-e66a846f3966\") " pod="openstack/nova-cell0-conductor-0" Dec 04 17:52:31 crc kubenswrapper[4631]: I1204 17:52:31.202731 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 04 17:52:31 crc kubenswrapper[4631]: I1204 17:52:31.472656 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 17:52:31 crc kubenswrapper[4631]: I1204 17:52:31.473184 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="f065ae96-43d1-4d55-824e-76922d68ddc9" containerName="glance-log" containerID="cri-o://6cd0ebbd21921f22396c0a9558824a76a6b41a1003b0bf57d4fecb86a249ce50" gracePeriod=30 Dec 04 17:52:31 crc kubenswrapper[4631]: I1204 17:52:31.473322 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="f065ae96-43d1-4d55-824e-76922d68ddc9" containerName="glance-httpd" containerID="cri-o://1540969c51c3402e1f5a9a53302b64eaba6daf28ffa8dd622ae4637919a90d1e" gracePeriod=30 Dec 04 17:52:31 crc kubenswrapper[4631]: I1204 17:52:31.800191 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 04 17:52:31 crc kubenswrapper[4631]: W1204 17:52:31.803733 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d165be1_162b_40cb_96fa_e66a846f3966.slice/crio-3077b6d9d55ca794c2dcbeca87b4d9c7cec4eda9e2bf0328c6c175a57f661def WatchSource:0}: Error finding container 3077b6d9d55ca794c2dcbeca87b4d9c7cec4eda9e2bf0328c6c175a57f661def: Status 404 returned error can't find the container with id 3077b6d9d55ca794c2dcbeca87b4d9c7cec4eda9e2bf0328c6c175a57f661def Dec 04 17:52:32 crc kubenswrapper[4631]: I1204 17:52:32.765548 4631 generic.go:334] "Generic (PLEG): container finished" podID="f065ae96-43d1-4d55-824e-76922d68ddc9" containerID="6cd0ebbd21921f22396c0a9558824a76a6b41a1003b0bf57d4fecb86a249ce50" exitCode=143 Dec 04 17:52:32 crc kubenswrapper[4631]: I1204 17:52:32.765642 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f065ae96-43d1-4d55-824e-76922d68ddc9","Type":"ContainerDied","Data":"6cd0ebbd21921f22396c0a9558824a76a6b41a1003b0bf57d4fecb86a249ce50"} Dec 04 17:52:32 crc kubenswrapper[4631]: I1204 17:52:32.767628 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"3d165be1-162b-40cb-96fa-e66a846f3966","Type":"ContainerStarted","Data":"0bcf739528cb8577d5dfa862bb17d9663c270407516495f20bba85a49137237b"} Dec 04 17:52:32 crc kubenswrapper[4631]: I1204 17:52:32.767667 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"3d165be1-162b-40cb-96fa-e66a846f3966","Type":"ContainerStarted","Data":"3077b6d9d55ca794c2dcbeca87b4d9c7cec4eda9e2bf0328c6c175a57f661def"} Dec 04 17:52:32 crc kubenswrapper[4631]: I1204 17:52:32.767806 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/nova-cell0-conductor-0" Dec 04 17:52:32 crc kubenswrapper[4631]: I1204 17:52:32.792800 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.792783798 podStartE2EDuration="2.792783798s" podCreationTimestamp="2025-12-04 17:52:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:52:32.787973815 +0000 UTC m=+1482.820215813" watchObservedRunningTime="2025-12-04 17:52:32.792783798 +0000 UTC m=+1482.825025796" Dec 04 17:52:32 crc kubenswrapper[4631]: I1204 17:52:32.884145 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 04 17:52:32 crc kubenswrapper[4631]: I1204 17:52:32.931184 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 04 17:52:32 crc kubenswrapper[4631]: I1204 17:52:32.931443 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="cb490f12-7a53-4fbd-a994-1c0ae225f253" containerName="glance-log" containerID="cri-o://bb62f1d046e85e252f53059ea99a0f32bee50f63379d75f37462c755d4e11913" gracePeriod=30 Dec 04 17:52:32 crc kubenswrapper[4631]: I1204 17:52:32.931553 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="cb490f12-7a53-4fbd-a994-1c0ae225f253" containerName="glance-httpd" containerID="cri-o://711e5e47379a65f306ba54cfebf210735b505aacea97f4f8139a420869205675" gracePeriod=30 Dec 04 17:52:33 crc kubenswrapper[4631]: I1204 17:52:33.789948 4631 generic.go:334] "Generic (PLEG): container finished" podID="cb490f12-7a53-4fbd-a994-1c0ae225f253" containerID="bb62f1d046e85e252f53059ea99a0f32bee50f63379d75f37462c755d4e11913" exitCode=143 Dec 04 17:52:33 crc kubenswrapper[4631]: I1204 17:52:33.790942 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cb490f12-7a53-4fbd-a994-1c0ae225f253","Type":"ContainerDied","Data":"bb62f1d046e85e252f53059ea99a0f32bee50f63379d75f37462c755d4e11913"} Dec 04 17:52:34 crc kubenswrapper[4631]: I1204 17:52:34.695284 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:52:34 crc kubenswrapper[4631]: I1204 17:52:34.695763 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" containerName="ceilometer-central-agent" containerID="cri-o://9a2b06f413781865c10dfd1ec9e6cd915c400f84118b99ba9554b2aace835f9d" gracePeriod=30 Dec 04 17:52:34 crc kubenswrapper[4631]: I1204 17:52:34.695796 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" containerName="sg-core" containerID="cri-o://1a87ed04e5042c2a89223b7190dbb9b838165f7c8d52d7dd3ae16f334400862b" gracePeriod=30 Dec 04 17:52:34 crc kubenswrapper[4631]: I1204 17:52:34.695838 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" containerName="ceilometer-notification-agent" containerID="cri-o://470b92e6fd056880808bba178ace4380fa4e6cebc298a51ec6ab32ec94937d7b" gracePeriod=30 Dec 04 17:52:34 crc kubenswrapper[4631]: I1204 17:52:34.695908 4631 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openstack/ceilometer-0" podUID="aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" containerName="proxy-httpd" containerID="cri-o://a2352c06307bcbad4567dea0ba5b36e096568d3ecf5b02c316e5a09300d50576" gracePeriod=30 Dec 04 17:52:34 crc kubenswrapper[4631]: I1204 17:52:34.702144 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.179:3000/\": EOF" Dec 04 17:52:34 crc kubenswrapper[4631]: I1204 17:52:34.800674 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="3d165be1-162b-40cb-96fa-e66a846f3966" containerName="nova-cell0-conductor-conductor" containerID="cri-o://0bcf739528cb8577d5dfa862bb17d9663c270407516495f20bba85a49137237b" gracePeriod=30 Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.634456 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.742578 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f065ae96-43d1-4d55-824e-76922d68ddc9-logs\") pod \"f065ae96-43d1-4d55-824e-76922d68ddc9\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.742622 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-config-data\") pod \"f065ae96-43d1-4d55-824e-76922d68ddc9\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.742652 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-public-tls-certs\") pod \"f065ae96-43d1-4d55-824e-76922d68ddc9\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.742684 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-combined-ca-bundle\") pod \"f065ae96-43d1-4d55-824e-76922d68ddc9\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.742720 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f065ae96-43d1-4d55-824e-76922d68ddc9-httpd-run\") pod \"f065ae96-43d1-4d55-824e-76922d68ddc9\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.742743 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-scripts\") pod \"f065ae96-43d1-4d55-824e-76922d68ddc9\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.742786 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c996k\" (UniqueName: \"kubernetes.io/projected/f065ae96-43d1-4d55-824e-76922d68ddc9-kube-api-access-c996k\") pod \"f065ae96-43d1-4d55-824e-76922d68ddc9\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " Dec 04 17:52:35 crc kubenswrapper[4631]: 
I1204 17:52:35.742808 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"f065ae96-43d1-4d55-824e-76922d68ddc9\" (UID: \"f065ae96-43d1-4d55-824e-76922d68ddc9\") " Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.749348 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "f065ae96-43d1-4d55-824e-76922d68ddc9" (UID: "f065ae96-43d1-4d55-824e-76922d68ddc9"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.749691 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f065ae96-43d1-4d55-824e-76922d68ddc9-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f065ae96-43d1-4d55-824e-76922d68ddc9" (UID: "f065ae96-43d1-4d55-824e-76922d68ddc9"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.749787 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-scripts" (OuterVolumeSpecName: "scripts") pod "f065ae96-43d1-4d55-824e-76922d68ddc9" (UID: "f065ae96-43d1-4d55-824e-76922d68ddc9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.752750 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f065ae96-43d1-4d55-824e-76922d68ddc9-logs" (OuterVolumeSpecName: "logs") pod "f065ae96-43d1-4d55-824e-76922d68ddc9" (UID: "f065ae96-43d1-4d55-824e-76922d68ddc9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.754598 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f065ae96-43d1-4d55-824e-76922d68ddc9-kube-api-access-c996k" (OuterVolumeSpecName: "kube-api-access-c996k") pod "f065ae96-43d1-4d55-824e-76922d68ddc9" (UID: "f065ae96-43d1-4d55-824e-76922d68ddc9"). InnerVolumeSpecName "kube-api-access-c996k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.779079 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f065ae96-43d1-4d55-824e-76922d68ddc9" (UID: "f065ae96-43d1-4d55-824e-76922d68ddc9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.803313 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "f065ae96-43d1-4d55-824e-76922d68ddc9" (UID: "f065ae96-43d1-4d55-824e-76922d68ddc9"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.803533 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-config-data" (OuterVolumeSpecName: "config-data") pod "f065ae96-43d1-4d55-824e-76922d68ddc9" (UID: "f065ae96-43d1-4d55-824e-76922d68ddc9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.810573 4631 generic.go:334] "Generic (PLEG): container finished" podID="f065ae96-43d1-4d55-824e-76922d68ddc9" containerID="1540969c51c3402e1f5a9a53302b64eaba6daf28ffa8dd622ae4637919a90d1e" exitCode=0 Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.810629 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f065ae96-43d1-4d55-824e-76922d68ddc9","Type":"ContainerDied","Data":"1540969c51c3402e1f5a9a53302b64eaba6daf28ffa8dd622ae4637919a90d1e"} Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.810655 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f065ae96-43d1-4d55-824e-76922d68ddc9","Type":"ContainerDied","Data":"4477f7849ec9809bf8ae543570e7f087b1c186c080c9d4f14a193495e37fd43d"} Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.810670 4631 scope.go:117] "RemoveContainer" containerID="1540969c51c3402e1f5a9a53302b64eaba6daf28ffa8dd622ae4637919a90d1e" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.810782 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.834427 4631 generic.go:334] "Generic (PLEG): container finished" podID="aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" containerID="a2352c06307bcbad4567dea0ba5b36e096568d3ecf5b02c316e5a09300d50576" exitCode=0 Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.834472 4631 generic.go:334] "Generic (PLEG): container finished" podID="aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" containerID="1a87ed04e5042c2a89223b7190dbb9b838165f7c8d52d7dd3ae16f334400862b" exitCode=2 Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.834482 4631 generic.go:334] "Generic (PLEG): container finished" podID="aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" containerID="9a2b06f413781865c10dfd1ec9e6cd915c400f84118b99ba9554b2aace835f9d" exitCode=0 Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.834503 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8","Type":"ContainerDied","Data":"a2352c06307bcbad4567dea0ba5b36e096568d3ecf5b02c316e5a09300d50576"} Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.834532 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8","Type":"ContainerDied","Data":"1a87ed04e5042c2a89223b7190dbb9b838165f7c8d52d7dd3ae16f334400862b"} Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.834543 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8","Type":"ContainerDied","Data":"9a2b06f413781865c10dfd1ec9e6cd915c400f84118b99ba9554b2aace835f9d"} Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.844453 4631 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume 
\"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.844491 4631 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f065ae96-43d1-4d55-824e-76922d68ddc9-logs\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.844501 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.844513 4631 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.844522 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.844531 4631 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f065ae96-43d1-4d55-824e-76922d68ddc9-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.844538 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f065ae96-43d1-4d55-824e-76922d68ddc9-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.844545 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c996k\" (UniqueName: \"kubernetes.io/projected/f065ae96-43d1-4d55-824e-76922d68ddc9-kube-api-access-c996k\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.871866 4631 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.883422 4631 scope.go:117] "RemoveContainer" containerID="6cd0ebbd21921f22396c0a9558824a76a6b41a1003b0bf57d4fecb86a249ce50" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.890078 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.902744 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.907618 4631 scope.go:117] "RemoveContainer" containerID="1540969c51c3402e1f5a9a53302b64eaba6daf28ffa8dd622ae4637919a90d1e" Dec 04 17:52:35 crc kubenswrapper[4631]: E1204 17:52:35.909769 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1540969c51c3402e1f5a9a53302b64eaba6daf28ffa8dd622ae4637919a90d1e\": container with ID starting with 1540969c51c3402e1f5a9a53302b64eaba6daf28ffa8dd622ae4637919a90d1e not found: ID does not exist" containerID="1540969c51c3402e1f5a9a53302b64eaba6daf28ffa8dd622ae4637919a90d1e" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.909813 4631 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"1540969c51c3402e1f5a9a53302b64eaba6daf28ffa8dd622ae4637919a90d1e"} err="failed to get container status \"1540969c51c3402e1f5a9a53302b64eaba6daf28ffa8dd622ae4637919a90d1e\": rpc error: code = NotFound desc = could not find container \"1540969c51c3402e1f5a9a53302b64eaba6daf28ffa8dd622ae4637919a90d1e\": container with ID starting with 1540969c51c3402e1f5a9a53302b64eaba6daf28ffa8dd622ae4637919a90d1e not found: ID does not exist" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.909837 4631 scope.go:117] "RemoveContainer" containerID="6cd0ebbd21921f22396c0a9558824a76a6b41a1003b0bf57d4fecb86a249ce50" Dec 04 17:52:35 crc kubenswrapper[4631]: E1204 17:52:35.910351 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6cd0ebbd21921f22396c0a9558824a76a6b41a1003b0bf57d4fecb86a249ce50\": container with ID starting with 6cd0ebbd21921f22396c0a9558824a76a6b41a1003b0bf57d4fecb86a249ce50 not found: ID does not exist" containerID="6cd0ebbd21921f22396c0a9558824a76a6b41a1003b0bf57d4fecb86a249ce50" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.910395 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cd0ebbd21921f22396c0a9558824a76a6b41a1003b0bf57d4fecb86a249ce50"} err="failed to get container status \"6cd0ebbd21921f22396c0a9558824a76a6b41a1003b0bf57d4fecb86a249ce50\": rpc error: code = NotFound desc = could not find container \"6cd0ebbd21921f22396c0a9558824a76a6b41a1003b0bf57d4fecb86a249ce50\": container with ID starting with 6cd0ebbd21921f22396c0a9558824a76a6b41a1003b0bf57d4fecb86a249ce50 not found: ID does not exist" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.919836 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 17:52:35 crc kubenswrapper[4631]: E1204 17:52:35.920249 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f065ae96-43d1-4d55-824e-76922d68ddc9" containerName="glance-httpd" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.920271 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="f065ae96-43d1-4d55-824e-76922d68ddc9" containerName="glance-httpd" Dec 04 17:52:35 crc kubenswrapper[4631]: E1204 17:52:35.920290 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f065ae96-43d1-4d55-824e-76922d68ddc9" containerName="glance-log" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.920300 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="f065ae96-43d1-4d55-824e-76922d68ddc9" containerName="glance-log" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.920491 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="f065ae96-43d1-4d55-824e-76922d68ddc9" containerName="glance-httpd" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.920517 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="f065ae96-43d1-4d55-824e-76922d68ddc9" containerName="glance-log" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.921542 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.923879 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.924067 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.947387 4631 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:35 crc kubenswrapper[4631]: I1204 17:52:35.952304 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.022638 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.022872 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.049112 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f61006f-d20d-43ed-94d6-95615925184f-config-data\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.049160 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f61006f-d20d-43ed-94d6-95615925184f-scripts\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.049183 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7f61006f-d20d-43ed-94d6-95615925184f-logs\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.049219 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f61006f-d20d-43ed-94d6-95615925184f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.049262 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7f61006f-d20d-43ed-94d6-95615925184f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " 
pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.049323 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7f61006f-d20d-43ed-94d6-95615925184f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.049340 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jq5km\" (UniqueName: \"kubernetes.io/projected/7f61006f-d20d-43ed-94d6-95615925184f-kube-api-access-jq5km\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.049358 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.150794 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f61006f-d20d-43ed-94d6-95615925184f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.150885 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7f61006f-d20d-43ed-94d6-95615925184f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.150977 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7f61006f-d20d-43ed-94d6-95615925184f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.151000 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jq5km\" (UniqueName: \"kubernetes.io/projected/7f61006f-d20d-43ed-94d6-95615925184f-kube-api-access-jq5km\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.151025 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.151355 4631 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") device mount path 
\"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.151858 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f61006f-d20d-43ed-94d6-95615925184f-config-data\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.152026 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f61006f-d20d-43ed-94d6-95615925184f-scripts\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.152126 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7f61006f-d20d-43ed-94d6-95615925184f-logs\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.151869 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7f61006f-d20d-43ed-94d6-95615925184f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.152417 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7f61006f-d20d-43ed-94d6-95615925184f-logs\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.155772 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f61006f-d20d-43ed-94d6-95615925184f-scripts\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.155919 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f61006f-d20d-43ed-94d6-95615925184f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.171007 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7f61006f-d20d-43ed-94d6-95615925184f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.181726 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f61006f-d20d-43ed-94d6-95615925184f-config-data\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.188688 4631 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jq5km\" (UniqueName: \"kubernetes.io/projected/7f61006f-d20d-43ed-94d6-95615925184f-kube-api-access-jq5km\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.193775 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"7f61006f-d20d-43ed-94d6-95615925184f\") " pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.248481 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.261976 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f065ae96-43d1-4d55-824e-76922d68ddc9" path="/var/lib/kubelet/pods/f065ae96-43d1-4d55-824e-76922d68ddc9/volumes" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.722675 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.850193 4631 generic.go:334] "Generic (PLEG): container finished" podID="aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" containerID="470b92e6fd056880808bba178ace4380fa4e6cebc298a51ec6ab32ec94937d7b" exitCode=0 Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.850281 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8","Type":"ContainerDied","Data":"470b92e6fd056880808bba178ace4380fa4e6cebc298a51ec6ab32ec94937d7b"} Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.850315 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8","Type":"ContainerDied","Data":"8805e79ea261b75114b0326ea6d2e23f321d94d0035d4a04ee7f9bad43a06e95"} Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.850339 4631 scope.go:117] "RemoveContainer" containerID="a2352c06307bcbad4567dea0ba5b36e096568d3ecf5b02c316e5a09300d50576" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.850510 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.857263 4631 generic.go:334] "Generic (PLEG): container finished" podID="cb490f12-7a53-4fbd-a994-1c0ae225f253" containerID="711e5e47379a65f306ba54cfebf210735b505aacea97f4f8139a420869205675" exitCode=0 Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.857302 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cb490f12-7a53-4fbd-a994-1c0ae225f253","Type":"ContainerDied","Data":"711e5e47379a65f306ba54cfebf210735b505aacea97f4f8139a420869205675"} Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.869057 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-config-data\") pod \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.869347 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-combined-ca-bundle\") pod \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.869394 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-log-httpd\") pod \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.869443 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ht6c8\" (UniqueName: \"kubernetes.io/projected/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-kube-api-access-ht6c8\") pod \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.869481 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-ceilometer-tls-certs\") pod \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.869517 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-run-httpd\") pod \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.869593 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-scripts\") pod \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.869617 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-sg-core-conf-yaml\") pod \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\" (UID: \"aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8\") " Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.871147 4631 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" (UID: "aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.871613 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" (UID: "aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.892535 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-scripts" (OuterVolumeSpecName: "scripts") pod "aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" (UID: "aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.892830 4631 scope.go:117] "RemoveContainer" containerID="1a87ed04e5042c2a89223b7190dbb9b838165f7c8d52d7dd3ae16f334400862b" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.893308 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-kube-api-access-ht6c8" (OuterVolumeSpecName: "kube-api-access-ht6c8") pod "aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" (UID: "aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8"). InnerVolumeSpecName "kube-api-access-ht6c8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.961957 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.971695 4631 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.971718 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ht6c8\" (UniqueName: \"kubernetes.io/projected/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-kube-api-access-ht6c8\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.971727 4631 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.971735 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:36 crc kubenswrapper[4631]: I1204 17:52:36.971694 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" (UID: "aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:52:37 crc kubenswrapper[4631]: W1204 17:52:37.015391 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f61006f_d20d_43ed_94d6_95615925184f.slice/crio-509fe3d0142ab35d49c708237e48b41b75752f5ce1daccbdb4a04e304794583b WatchSource:0}: Error finding container 509fe3d0142ab35d49c708237e48b41b75752f5ce1daccbdb4a04e304794583b: Status 404 returned error can't find the container with id 509fe3d0142ab35d49c708237e48b41b75752f5ce1daccbdb4a04e304794583b Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.062256 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" (UID: "aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.076782 4631 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.077392 4631 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.105027 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" (UID: "aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.111616 4631 scope.go:117] "RemoveContainer" containerID="470b92e6fd056880808bba178ace4380fa4e6cebc298a51ec6ab32ec94937d7b" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.185422 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.198432 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-config-data" (OuterVolumeSpecName: "config-data") pod "aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" (UID: "aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.229618 4631 scope.go:117] "RemoveContainer" containerID="9a2b06f413781865c10dfd1ec9e6cd915c400f84118b99ba9554b2aace835f9d" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.264430 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.273053 4631 scope.go:117] "RemoveContainer" containerID="a2352c06307bcbad4567dea0ba5b36e096568d3ecf5b02c316e5a09300d50576" Dec 04 17:52:37 crc kubenswrapper[4631]: E1204 17:52:37.273740 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2352c06307bcbad4567dea0ba5b36e096568d3ecf5b02c316e5a09300d50576\": container with ID starting with a2352c06307bcbad4567dea0ba5b36e096568d3ecf5b02c316e5a09300d50576 not found: ID does not exist" containerID="a2352c06307bcbad4567dea0ba5b36e096568d3ecf5b02c316e5a09300d50576" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.273781 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2352c06307bcbad4567dea0ba5b36e096568d3ecf5b02c316e5a09300d50576"} err="failed to get container status \"a2352c06307bcbad4567dea0ba5b36e096568d3ecf5b02c316e5a09300d50576\": rpc error: code = NotFound desc = could not find container \"a2352c06307bcbad4567dea0ba5b36e096568d3ecf5b02c316e5a09300d50576\": container with ID starting with a2352c06307bcbad4567dea0ba5b36e096568d3ecf5b02c316e5a09300d50576 not found: ID does not exist" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.273818 4631 scope.go:117] "RemoveContainer" containerID="1a87ed04e5042c2a89223b7190dbb9b838165f7c8d52d7dd3ae16f334400862b" Dec 04 17:52:37 crc kubenswrapper[4631]: E1204 17:52:37.274226 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a87ed04e5042c2a89223b7190dbb9b838165f7c8d52d7dd3ae16f334400862b\": container with ID starting with 1a87ed04e5042c2a89223b7190dbb9b838165f7c8d52d7dd3ae16f334400862b not found: ID does not exist" containerID="1a87ed04e5042c2a89223b7190dbb9b838165f7c8d52d7dd3ae16f334400862b" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.274262 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a87ed04e5042c2a89223b7190dbb9b838165f7c8d52d7dd3ae16f334400862b"} err="failed to get container status \"1a87ed04e5042c2a89223b7190dbb9b838165f7c8d52d7dd3ae16f334400862b\": rpc error: code = NotFound desc = could not find container \"1a87ed04e5042c2a89223b7190dbb9b838165f7c8d52d7dd3ae16f334400862b\": container with ID starting with 1a87ed04e5042c2a89223b7190dbb9b838165f7c8d52d7dd3ae16f334400862b not found: ID does not exist" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.274301 4631 scope.go:117] "RemoveContainer" containerID="470b92e6fd056880808bba178ace4380fa4e6cebc298a51ec6ab32ec94937d7b" Dec 04 17:52:37 crc kubenswrapper[4631]: E1204 17:52:37.290483 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"470b92e6fd056880808bba178ace4380fa4e6cebc298a51ec6ab32ec94937d7b\": container with ID starting with 470b92e6fd056880808bba178ace4380fa4e6cebc298a51ec6ab32ec94937d7b not found: ID does not exist" containerID="470b92e6fd056880808bba178ace4380fa4e6cebc298a51ec6ab32ec94937d7b" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.290526 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"470b92e6fd056880808bba178ace4380fa4e6cebc298a51ec6ab32ec94937d7b"} err="failed to get container status \"470b92e6fd056880808bba178ace4380fa4e6cebc298a51ec6ab32ec94937d7b\": rpc error: code = NotFound 
desc = could not find container \"470b92e6fd056880808bba178ace4380fa4e6cebc298a51ec6ab32ec94937d7b\": container with ID starting with 470b92e6fd056880808bba178ace4380fa4e6cebc298a51ec6ab32ec94937d7b not found: ID does not exist" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.290551 4631 scope.go:117] "RemoveContainer" containerID="9a2b06f413781865c10dfd1ec9e6cd915c400f84118b99ba9554b2aace835f9d" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.291751 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:37 crc kubenswrapper[4631]: E1204 17:52:37.295351 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a2b06f413781865c10dfd1ec9e6cd915c400f84118b99ba9554b2aace835f9d\": container with ID starting with 9a2b06f413781865c10dfd1ec9e6cd915c400f84118b99ba9554b2aace835f9d not found: ID does not exist" containerID="9a2b06f413781865c10dfd1ec9e6cd915c400f84118b99ba9554b2aace835f9d" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.295414 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a2b06f413781865c10dfd1ec9e6cd915c400f84118b99ba9554b2aace835f9d"} err="failed to get container status \"9a2b06f413781865c10dfd1ec9e6cd915c400f84118b99ba9554b2aace835f9d\": rpc error: code = NotFound desc = could not find container \"9a2b06f413781865c10dfd1ec9e6cd915c400f84118b99ba9554b2aace835f9d\": container with ID starting with 9a2b06f413781865c10dfd1ec9e6cd915c400f84118b99ba9554b2aace835f9d not found: ID does not exist" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.393771 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-combined-ca-bundle\") pod \"cb490f12-7a53-4fbd-a994-1c0ae225f253\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.393892 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"cb490f12-7a53-4fbd-a994-1c0ae225f253\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.393918 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-internal-tls-certs\") pod \"cb490f12-7a53-4fbd-a994-1c0ae225f253\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.393955 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-config-data\") pod \"cb490f12-7a53-4fbd-a994-1c0ae225f253\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.393993 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cb490f12-7a53-4fbd-a994-1c0ae225f253-httpd-run\") pod \"cb490f12-7a53-4fbd-a994-1c0ae225f253\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.394068 4631 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-scripts\") pod \"cb490f12-7a53-4fbd-a994-1c0ae225f253\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.394125 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb490f12-7a53-4fbd-a994-1c0ae225f253-logs\") pod \"cb490f12-7a53-4fbd-a994-1c0ae225f253\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.394161 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kgpzr\" (UniqueName: \"kubernetes.io/projected/cb490f12-7a53-4fbd-a994-1c0ae225f253-kube-api-access-kgpzr\") pod \"cb490f12-7a53-4fbd-a994-1c0ae225f253\" (UID: \"cb490f12-7a53-4fbd-a994-1c0ae225f253\") " Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.394841 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb490f12-7a53-4fbd-a994-1c0ae225f253-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "cb490f12-7a53-4fbd-a994-1c0ae225f253" (UID: "cb490f12-7a53-4fbd-a994-1c0ae225f253"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.399030 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb490f12-7a53-4fbd-a994-1c0ae225f253-logs" (OuterVolumeSpecName: "logs") pod "cb490f12-7a53-4fbd-a994-1c0ae225f253" (UID: "cb490f12-7a53-4fbd-a994-1c0ae225f253"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.404076 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-scripts" (OuterVolumeSpecName: "scripts") pod "cb490f12-7a53-4fbd-a994-1c0ae225f253" (UID: "cb490f12-7a53-4fbd-a994-1c0ae225f253"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.408816 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "cb490f12-7a53-4fbd-a994-1c0ae225f253" (UID: "cb490f12-7a53-4fbd-a994-1c0ae225f253"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.413274 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb490f12-7a53-4fbd-a994-1c0ae225f253-kube-api-access-kgpzr" (OuterVolumeSpecName: "kube-api-access-kgpzr") pod "cb490f12-7a53-4fbd-a994-1c0ae225f253" (UID: "cb490f12-7a53-4fbd-a994-1c0ae225f253"). InnerVolumeSpecName "kube-api-access-kgpzr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.448321 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "cb490f12-7a53-4fbd-a994-1c0ae225f253" (UID: "cb490f12-7a53-4fbd-a994-1c0ae225f253"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.496550 4631 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.496620 4631 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.496636 4631 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cb490f12-7a53-4fbd-a994-1c0ae225f253-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.496649 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.496665 4631 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb490f12-7a53-4fbd-a994-1c0ae225f253-logs\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.496677 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kgpzr\" (UniqueName: \"kubernetes.io/projected/cb490f12-7a53-4fbd-a994-1c0ae225f253-kube-api-access-kgpzr\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.516205 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.530455 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cb490f12-7a53-4fbd-a994-1c0ae225f253" (UID: "cb490f12-7a53-4fbd-a994-1c0ae225f253"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.536396 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.538480 4631 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.548572 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-config-data" (OuterVolumeSpecName: "config-data") pod "cb490f12-7a53-4fbd-a994-1c0ae225f253" (UID: "cb490f12-7a53-4fbd-a994-1c0ae225f253"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.552580 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:52:37 crc kubenswrapper[4631]: E1204 17:52:37.553086 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" containerName="ceilometer-central-agent" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.553101 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" containerName="ceilometer-central-agent" Dec 04 17:52:37 crc kubenswrapper[4631]: E1204 17:52:37.553115 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb490f12-7a53-4fbd-a994-1c0ae225f253" containerName="glance-httpd" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.553121 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb490f12-7a53-4fbd-a994-1c0ae225f253" containerName="glance-httpd" Dec 04 17:52:37 crc kubenswrapper[4631]: E1204 17:52:37.553131 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb490f12-7a53-4fbd-a994-1c0ae225f253" containerName="glance-log" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.553138 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb490f12-7a53-4fbd-a994-1c0ae225f253" containerName="glance-log" Dec 04 17:52:37 crc kubenswrapper[4631]: E1204 17:52:37.553165 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" containerName="ceilometer-notification-agent" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.553171 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" containerName="ceilometer-notification-agent" Dec 04 17:52:37 crc kubenswrapper[4631]: E1204 17:52:37.553184 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" containerName="sg-core" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.553190 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" containerName="sg-core" Dec 04 17:52:37 crc kubenswrapper[4631]: E1204 17:52:37.553197 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" containerName="proxy-httpd" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.553203 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" containerName="proxy-httpd" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.553611 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" containerName="ceilometer-central-agent" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.553630 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb490f12-7a53-4fbd-a994-1c0ae225f253" containerName="glance-log" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.553642 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" containerName="sg-core" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.553649 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" containerName="ceilometer-notification-agent" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.553668 4631 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" containerName="proxy-httpd" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.553682 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb490f12-7a53-4fbd-a994-1c0ae225f253" containerName="glance-httpd" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.555751 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.561274 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.562060 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.562298 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.565273 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.598084 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.598119 4631 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.598130 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb490f12-7a53-4fbd-a994-1c0ae225f253-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.699475 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dce8f67a-41be-41df-b562-c3ed6bfa64f9-run-httpd\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.699537 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.699590 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-scripts\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.699608 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-config-data\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.699773 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6fp7\" (UniqueName: 
\"kubernetes.io/projected/dce8f67a-41be-41df-b562-c3ed6bfa64f9-kube-api-access-z6fp7\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.699830 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dce8f67a-41be-41df-b562-c3ed6bfa64f9-log-httpd\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.699901 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.699968 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.801653 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.801790 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dce8f67a-41be-41df-b562-c3ed6bfa64f9-run-httpd\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.801854 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.801896 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-scripts\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.801925 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-config-data\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.801986 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6fp7\" (UniqueName: \"kubernetes.io/projected/dce8f67a-41be-41df-b562-c3ed6bfa64f9-kube-api-access-z6fp7\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.802010 4631 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dce8f67a-41be-41df-b562-c3ed6bfa64f9-log-httpd\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.802045 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.802563 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dce8f67a-41be-41df-b562-c3ed6bfa64f9-run-httpd\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.803341 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dce8f67a-41be-41df-b562-c3ed6bfa64f9-log-httpd\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.806501 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.806656 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-config-data\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.810174 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.811779 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.820355 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-scripts\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.827795 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6fp7\" (UniqueName: \"kubernetes.io/projected/dce8f67a-41be-41df-b562-c3ed6bfa64f9-kube-api-access-z6fp7\") pod \"ceilometer-0\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " pod="openstack/ceilometer-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.870578 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-default-internal-api-0" event={"ID":"cb490f12-7a53-4fbd-a994-1c0ae225f253","Type":"ContainerDied","Data":"daad094b5838d642c4ef6d67b4c56d0415b9357c25735974b928d6c76d500cf6"} Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.870628 4631 scope.go:117] "RemoveContainer" containerID="711e5e47379a65f306ba54cfebf210735b505aacea97f4f8139a420869205675" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.870742 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.874835 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7f61006f-d20d-43ed-94d6-95615925184f","Type":"ContainerStarted","Data":"509fe3d0142ab35d49c708237e48b41b75752f5ce1daccbdb4a04e304794583b"} Dec 04 17:52:37 crc kubenswrapper[4631]: I1204 17:52:37.883345 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.009055 4631 scope.go:117] "RemoveContainer" containerID="bb62f1d046e85e252f53059ea99a0f32bee50f63379d75f37462c755d4e11913" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.048350 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.075584 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.094148 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.105685 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.114039 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.114691 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.211656 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdgnz\" (UniqueName: \"kubernetes.io/projected/f773d050-d51b-4753-9be0-8f5a91c674bc-kube-api-access-wdgnz\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.211694 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.211783 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f773d050-d51b-4753-9be0-8f5a91c674bc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.212124 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f773d050-d51b-4753-9be0-8f5a91c674bc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.212172 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f773d050-d51b-4753-9be0-8f5a91c674bc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.212212 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f773d050-d51b-4753-9be0-8f5a91c674bc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.212240 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f773d050-d51b-4753-9be0-8f5a91c674bc-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.212254 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f773d050-d51b-4753-9be0-8f5a91c674bc-logs\") pod 
\"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.314929 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f773d050-d51b-4753-9be0-8f5a91c674bc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.314978 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f773d050-d51b-4753-9be0-8f5a91c674bc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.315024 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f773d050-d51b-4753-9be0-8f5a91c674bc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.315050 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f773d050-d51b-4753-9be0-8f5a91c674bc-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.315066 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f773d050-d51b-4753-9be0-8f5a91c674bc-logs\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.315087 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdgnz\" (UniqueName: \"kubernetes.io/projected/f773d050-d51b-4753-9be0-8f5a91c674bc-kube-api-access-wdgnz\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.315106 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.315180 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f773d050-d51b-4753-9be0-8f5a91c674bc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.317256 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8" path="/var/lib/kubelet/pods/aa41b3bb-4ff2-4d37-b51e-8f8a75c92db8/volumes" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.318669 4631 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="cb490f12-7a53-4fbd-a994-1c0ae225f253" path="/var/lib/kubelet/pods/cb490f12-7a53-4fbd-a994-1c0ae225f253/volumes" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.319384 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.331809 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f773d050-d51b-4753-9be0-8f5a91c674bc-logs\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.332068 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f773d050-d51b-4753-9be0-8f5a91c674bc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.333643 4631 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.342754 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f773d050-d51b-4753-9be0-8f5a91c674bc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.344967 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f773d050-d51b-4753-9be0-8f5a91c674bc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.345570 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f773d050-d51b-4753-9be0-8f5a91c674bc-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.352027 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdgnz\" (UniqueName: \"kubernetes.io/projected/f773d050-d51b-4753-9be0-8f5a91c674bc-kube-api-access-wdgnz\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.383382 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f773d050-d51b-4753-9be0-8f5a91c674bc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.532938 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"f773d050-d51b-4753-9be0-8f5a91c674bc\") " pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.640009 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.727995 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.904912 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dce8f67a-41be-41df-b562-c3ed6bfa64f9","Type":"ContainerStarted","Data":"4e0a31b399d7605024711913e76cf1740d97b53da5485164b598578da7cd9b8a"} Dec 04 17:52:38 crc kubenswrapper[4631]: I1204 17:52:38.909355 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7f61006f-d20d-43ed-94d6-95615925184f","Type":"ContainerStarted","Data":"c805a7aa7414891d85389668ac29b8a4df859e53ccda285371395feabba5fe5e"} Dec 04 17:52:39 crc kubenswrapper[4631]: I1204 17:52:39.313921 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 04 17:52:39 crc kubenswrapper[4631]: I1204 17:52:39.939725 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f773d050-d51b-4753-9be0-8f5a91c674bc","Type":"ContainerStarted","Data":"ffa92c5fee2b6410e77af91cce572d73755a2b8ef3ae5951cac988bb8cf32969"} Dec 04 17:52:39 crc kubenswrapper[4631]: I1204 17:52:39.950611 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7f61006f-d20d-43ed-94d6-95615925184f","Type":"ContainerStarted","Data":"38dfc7832325c7a9fabb08ee112178e37ce6f49371250d86872465e21e3986db"} Dec 04 17:52:39 crc kubenswrapper[4631]: I1204 17:52:39.968874 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dce8f67a-41be-41df-b562-c3ed6bfa64f9","Type":"ContainerStarted","Data":"bc9878ef321b33e1587e2d83caae942a951d4649f1511d123b650ac7a4e7f99a"} Dec 04 17:52:39 crc kubenswrapper[4631]: I1204 17:52:39.989498 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.989478501 podStartE2EDuration="4.989478501s" podCreationTimestamp="2025-12-04 17:52:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:52:39.989347017 +0000 UTC m=+1490.021589025" watchObservedRunningTime="2025-12-04 17:52:39.989478501 +0000 UTC m=+1490.021720499" Dec 04 17:52:40 crc kubenswrapper[4631]: I1204 17:52:40.981207 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f773d050-d51b-4753-9be0-8f5a91c674bc","Type":"ContainerStarted","Data":"a99eb59247e055b1350f60fd39ff2fbbad7d8d2439971f1d424180d22f575d48"} Dec 04 17:52:40 crc kubenswrapper[4631]: I1204 17:52:40.981725 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f773d050-d51b-4753-9be0-8f5a91c674bc","Type":"ContainerStarted","Data":"8b27f40622f8375a8dfe1d040bdf02f29829089f2082e9372d30492d4e0467ca"} Dec 04 17:52:40 crc kubenswrapper[4631]: I1204 17:52:40.985405 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"dce8f67a-41be-41df-b562-c3ed6bfa64f9","Type":"ContainerStarted","Data":"87a1c288aa47405660efd06fd69bbaa58c9fefc897a3f57e59241674c01c15d3"} Dec 04 17:52:40 crc kubenswrapper[4631]: I1204 17:52:40.985437 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dce8f67a-41be-41df-b562-c3ed6bfa64f9","Type":"ContainerStarted","Data":"ac0ae94d97f0b99afc56faf0356cd55bcc89519d01482264c8fa0d43ea835a34"} Dec 04 17:52:41 crc kubenswrapper[4631]: I1204 17:52:40.999317 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=2.9993034610000002 podStartE2EDuration="2.999303461s" podCreationTimestamp="2025-12-04 17:52:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:52:40.997526923 +0000 UTC m=+1491.029768921" watchObservedRunningTime="2025-12-04 17:52:40.999303461 +0000 UTC m=+1491.031545449" Dec 04 17:52:41 crc kubenswrapper[4631]: E1204 17:52:41.207511 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0bcf739528cb8577d5dfa862bb17d9663c270407516495f20bba85a49137237b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 04 17:52:41 crc kubenswrapper[4631]: E1204 17:52:41.208750 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0bcf739528cb8577d5dfa862bb17d9663c270407516495f20bba85a49137237b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 04 17:52:41 crc kubenswrapper[4631]: E1204 17:52:41.210171 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0bcf739528cb8577d5dfa862bb17d9663c270407516495f20bba85a49137237b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 04 17:52:41 crc kubenswrapper[4631]: E1204 17:52:41.210283 4631 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="3d165be1-162b-40cb-96fa-e66a846f3966" containerName="nova-cell0-conductor-conductor" Dec 04 17:52:41 crc kubenswrapper[4631]: I1204 17:52:41.996981 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dce8f67a-41be-41df-b562-c3ed6bfa64f9","Type":"ContainerStarted","Data":"37e893a066073a9be291a258223d90cbc5a147bc0b8e857175d4f0424b72d062"} Dec 04 17:52:42 crc kubenswrapper[4631]: I1204 17:52:42.030852 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.065654322 podStartE2EDuration="5.03082758s" podCreationTimestamp="2025-12-04 17:52:37 +0000 UTC" firstStartedPulling="2025-12-04 17:52:38.648960132 +0000 UTC m=+1488.681202130" lastFinishedPulling="2025-12-04 17:52:41.61413339 +0000 UTC m=+1491.646375388" observedRunningTime="2025-12-04 17:52:42.019524543 +0000 UTC m=+1492.051766581" watchObservedRunningTime="2025-12-04 17:52:42.03082758 +0000 UTC m=+1492.063069588" Dec 04 17:52:43 crc 
kubenswrapper[4631]: I1204 17:52:43.004129 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.017002 4631 generic.go:334] "Generic (PLEG): container finished" podID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerID="ea0539fbc919798993abcd7146bb0239a2e44ab54442e650d0342a83e2706f53" exitCode=137 Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.017355 4631 generic.go:334] "Generic (PLEG): container finished" podID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerID="dd83d7f3d8572b684138f5f3d8a13e79de4098e2db853d6a15e483ce33e834ab" exitCode=137 Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.017077 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b99dd8d64-9nrvl" event={"ID":"a675d52a-03e9-46e8-8b51-4e7f378179cf","Type":"ContainerDied","Data":"ea0539fbc919798993abcd7146bb0239a2e44ab54442e650d0342a83e2706f53"} Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.017430 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b99dd8d64-9nrvl" event={"ID":"a675d52a-03e9-46e8-8b51-4e7f378179cf","Type":"ContainerDied","Data":"dd83d7f3d8572b684138f5f3d8a13e79de4098e2db853d6a15e483ce33e834ab"} Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.017451 4631 scope.go:117] "RemoveContainer" containerID="7c15bbd3b5b3f4435f5df4d6fad19a863e4c5a1e382a6d66758122b5970fd161" Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.465077 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b99dd8d64-9nrvl" Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.549630 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a675d52a-03e9-46e8-8b51-4e7f378179cf-horizon-tls-certs\") pod \"a675d52a-03e9-46e8-8b51-4e7f378179cf\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.549695 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a675d52a-03e9-46e8-8b51-4e7f378179cf-scripts\") pod \"a675d52a-03e9-46e8-8b51-4e7f378179cf\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.549762 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h64zn\" (UniqueName: \"kubernetes.io/projected/a675d52a-03e9-46e8-8b51-4e7f378179cf-kube-api-access-h64zn\") pod \"a675d52a-03e9-46e8-8b51-4e7f378179cf\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.549785 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a675d52a-03e9-46e8-8b51-4e7f378179cf-config-data\") pod \"a675d52a-03e9-46e8-8b51-4e7f378179cf\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.549910 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a675d52a-03e9-46e8-8b51-4e7f378179cf-horizon-secret-key\") pod \"a675d52a-03e9-46e8-8b51-4e7f378179cf\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.549944 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a675d52a-03e9-46e8-8b51-4e7f378179cf-combined-ca-bundle\") pod \"a675d52a-03e9-46e8-8b51-4e7f378179cf\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.549964 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a675d52a-03e9-46e8-8b51-4e7f378179cf-logs\") pod \"a675d52a-03e9-46e8-8b51-4e7f378179cf\" (UID: \"a675d52a-03e9-46e8-8b51-4e7f378179cf\") " Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.550884 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a675d52a-03e9-46e8-8b51-4e7f378179cf-logs" (OuterVolumeSpecName: "logs") pod "a675d52a-03e9-46e8-8b51-4e7f378179cf" (UID: "a675d52a-03e9-46e8-8b51-4e7f378179cf"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.557301 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a675d52a-03e9-46e8-8b51-4e7f378179cf-kube-api-access-h64zn" (OuterVolumeSpecName: "kube-api-access-h64zn") pod "a675d52a-03e9-46e8-8b51-4e7f378179cf" (UID: "a675d52a-03e9-46e8-8b51-4e7f378179cf"). InnerVolumeSpecName "kube-api-access-h64zn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.560816 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a675d52a-03e9-46e8-8b51-4e7f378179cf-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "a675d52a-03e9-46e8-8b51-4e7f378179cf" (UID: "a675d52a-03e9-46e8-8b51-4e7f378179cf"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.574508 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a675d52a-03e9-46e8-8b51-4e7f378179cf-config-data" (OuterVolumeSpecName: "config-data") pod "a675d52a-03e9-46e8-8b51-4e7f378179cf" (UID: "a675d52a-03e9-46e8-8b51-4e7f378179cf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.581221 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a675d52a-03e9-46e8-8b51-4e7f378179cf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a675d52a-03e9-46e8-8b51-4e7f378179cf" (UID: "a675d52a-03e9-46e8-8b51-4e7f378179cf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.582984 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a675d52a-03e9-46e8-8b51-4e7f378179cf-scripts" (OuterVolumeSpecName: "scripts") pod "a675d52a-03e9-46e8-8b51-4e7f378179cf" (UID: "a675d52a-03e9-46e8-8b51-4e7f378179cf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.622535 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a675d52a-03e9-46e8-8b51-4e7f378179cf-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "a675d52a-03e9-46e8-8b51-4e7f378179cf" (UID: "a675d52a-03e9-46e8-8b51-4e7f378179cf"). 
InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.652748 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a675d52a-03e9-46e8-8b51-4e7f378179cf-scripts\") on node \"crc\" DevicePath \"\""
Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.652836 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h64zn\" (UniqueName: \"kubernetes.io/projected/a675d52a-03e9-46e8-8b51-4e7f378179cf-kube-api-access-h64zn\") on node \"crc\" DevicePath \"\""
Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.652977 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a675d52a-03e9-46e8-8b51-4e7f378179cf-config-data\") on node \"crc\" DevicePath \"\""
Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.652998 4631 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a675d52a-03e9-46e8-8b51-4e7f378179cf-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.653016 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a675d52a-03e9-46e8-8b51-4e7f378179cf-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.653032 4631 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a675d52a-03e9-46e8-8b51-4e7f378179cf-logs\") on node \"crc\" DevicePath \"\""
Dec 04 17:52:44 crc kubenswrapper[4631]: I1204 17:52:44.653047 4631 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a675d52a-03e9-46e8-8b51-4e7f378179cf-horizon-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 04 17:52:45 crc kubenswrapper[4631]: I1204 17:52:45.029106 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b99dd8d64-9nrvl" event={"ID":"a675d52a-03e9-46e8-8b51-4e7f378179cf","Type":"ContainerDied","Data":"0186dd6fb04f8834957eea59aaf33053c831eb9b19bf4e58a8658dfb47c240d4"}
Dec 04 17:52:45 crc kubenswrapper[4631]: I1204 17:52:45.029151 4631 scope.go:117] "RemoveContainer" containerID="ea0539fbc919798993abcd7146bb0239a2e44ab54442e650d0342a83e2706f53"
Dec 04 17:52:45 crc kubenswrapper[4631]: I1204 17:52:45.029267 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b99dd8d64-9nrvl"
Dec 04 17:52:45 crc kubenswrapper[4631]: I1204 17:52:45.079047 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7b99dd8d64-9nrvl"]
Dec 04 17:52:45 crc kubenswrapper[4631]: I1204 17:52:45.090458 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7b99dd8d64-9nrvl"]
Dec 04 17:52:45 crc kubenswrapper[4631]: I1204 17:52:45.219468 4631 scope.go:117] "RemoveContainer" containerID="dd83d7f3d8572b684138f5f3d8a13e79de4098e2db853d6a15e483ce33e834ab"
Dec 04 17:52:46 crc kubenswrapper[4631]: E1204 17:52:46.205107 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0bcf739528cb8577d5dfa862bb17d9663c270407516495f20bba85a49137237b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 04 17:52:46 crc kubenswrapper[4631]: E1204 17:52:46.206470 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0bcf739528cb8577d5dfa862bb17d9663c270407516495f20bba85a49137237b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 04 17:52:46 crc kubenswrapper[4631]: E1204 17:52:46.207987 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0bcf739528cb8577d5dfa862bb17d9663c270407516495f20bba85a49137237b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 04 17:52:46 crc kubenswrapper[4631]: E1204 17:52:46.208020 4631 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="3d165be1-162b-40cb-96fa-e66a846f3966" containerName="nova-cell0-conductor-conductor"
Dec 04 17:52:46 crc kubenswrapper[4631]: I1204 17:52:46.251196 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" path="/var/lib/kubelet/pods/a675d52a-03e9-46e8-8b51-4e7f378179cf/volumes"
Dec 04 17:52:46 crc kubenswrapper[4631]: I1204 17:52:46.251860 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Dec 04 17:52:46 crc kubenswrapper[4631]: I1204 17:52:46.251893 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Dec 04 17:52:46 crc kubenswrapper[4631]: I1204 17:52:46.289961 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Dec 04 17:52:46 crc kubenswrapper[4631]: I1204 17:52:46.298127 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Dec 04 17:52:47 crc kubenswrapper[4631]: I1204 17:52:47.052131 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 04 17:52:47 crc kubenswrapper[4631]: I1204 17:52:47.052763 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 04 17:52:48 crc kubenswrapper[4631]: I1204 17:52:48.728154 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Dec 04 17:52:48 crc kubenswrapper[4631]: I1204 17:52:48.728487 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Dec 04 17:52:48 crc kubenswrapper[4631]: I1204 17:52:48.772864 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Dec 04 17:52:48 crc kubenswrapper[4631]: I1204 17:52:48.774135 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Dec 04 17:52:49 crc kubenswrapper[4631]: I1204 17:52:49.080786 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Dec 04 17:52:49 crc kubenswrapper[4631]: I1204 17:52:49.080818 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Dec 04 17:52:49 crc kubenswrapper[4631]: I1204 17:52:49.168508 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Dec 04 17:52:49 crc kubenswrapper[4631]: I1204 17:52:49.168630 4631 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 04 17:52:49 crc kubenswrapper[4631]: I1204 17:52:49.174890 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Dec 04 17:52:51 crc kubenswrapper[4631]: I1204 17:52:51.095975 4631 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 04 17:52:51 crc kubenswrapper[4631]: I1204 17:52:51.097445 4631 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 04 17:52:51 crc kubenswrapper[4631]: E1204 17:52:51.204771 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0bcf739528cb8577d5dfa862bb17d9663c270407516495f20bba85a49137237b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 04 17:52:51 crc kubenswrapper[4631]: E1204 17:52:51.211399 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0bcf739528cb8577d5dfa862bb17d9663c270407516495f20bba85a49137237b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 04 17:52:51 crc kubenswrapper[4631]: E1204 17:52:51.213841 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0bcf739528cb8577d5dfa862bb17d9663c270407516495f20bba85a49137237b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 04 17:52:51 crc kubenswrapper[4631]: E1204 17:52:51.213931 4631 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="3d165be1-162b-40cb-96fa-e66a846f3966" containerName="nova-cell0-conductor-conductor"
Dec 04 17:52:51 crc kubenswrapper[4631]: I1204 17:52:51.241781 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Dec 04 17:52:51 crc kubenswrapper[4631]: I1204 17:52:51.247176 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Dec 04 17:52:56 crc kubenswrapper[4631]: E1204 17:52:56.205288 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0bcf739528cb8577d5dfa862bb17d9663c270407516495f20bba85a49137237b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 04 17:52:56 crc kubenswrapper[4631]: E1204 17:52:56.206830 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0bcf739528cb8577d5dfa862bb17d9663c270407516495f20bba85a49137237b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 04 17:52:56 crc kubenswrapper[4631]: E1204 17:52:56.208406 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0bcf739528cb8577d5dfa862bb17d9663c270407516495f20bba85a49137237b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 04 17:52:56 crc kubenswrapper[4631]: E1204 17:52:56.208453 4631 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="3d165be1-162b-40cb-96fa-e66a846f3966" containerName="nova-cell0-conductor-conductor"
Dec 04 17:53:01 crc kubenswrapper[4631]: E1204 17:53:01.205768 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0bcf739528cb8577d5dfa862bb17d9663c270407516495f20bba85a49137237b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 04 17:53:01 crc kubenswrapper[4631]: E1204 17:53:01.209722 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0bcf739528cb8577d5dfa862bb17d9663c270407516495f20bba85a49137237b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 04 17:53:01 crc kubenswrapper[4631]: E1204 17:53:01.211452 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0bcf739528cb8577d5dfa862bb17d9663c270407516495f20bba85a49137237b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 04 17:53:01 crc kubenswrapper[4631]: E1204 17:53:01.211502 4631 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="3d165be1-162b-40cb-96fa-e66a846f3966" containerName="nova-cell0-conductor-conductor"
Dec 04 17:53:05 crc kubenswrapper[4631]: E1204 17:53:05.103459 4631 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d165be1_162b_40cb_96fa_e66a846f3966.slice/crio-conmon-0bcf739528cb8577d5dfa862bb17d9663c270407516495f20bba85a49137237b.scope\": RecentStats: unable to find data in memory cache]"
Dec 04 17:53:05 crc kubenswrapper[4631]: I1204 17:53:05.230055 4631 generic.go:334] "Generic (PLEG): container finished" podID="3d165be1-162b-40cb-96fa-e66a846f3966" containerID="0bcf739528cb8577d5dfa862bb17d9663c270407516495f20bba85a49137237b" exitCode=137
Dec 04 17:53:05 crc kubenswrapper[4631]: I1204 17:53:05.230098 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"3d165be1-162b-40cb-96fa-e66a846f3966","Type":"ContainerDied","Data":"0bcf739528cb8577d5dfa862bb17d9663c270407516495f20bba85a49137237b"}
Dec 04 17:53:05 crc kubenswrapper[4631]: I1204 17:53:05.418808 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Dec 04 17:53:05 crc kubenswrapper[4631]: I1204 17:53:05.552622 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d165be1-162b-40cb-96fa-e66a846f3966-combined-ca-bundle\") pod \"3d165be1-162b-40cb-96fa-e66a846f3966\" (UID: \"3d165be1-162b-40cb-96fa-e66a846f3966\") "
Dec 04 17:53:05 crc kubenswrapper[4631]: I1204 17:53:05.552695 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fn5bf\" (UniqueName: \"kubernetes.io/projected/3d165be1-162b-40cb-96fa-e66a846f3966-kube-api-access-fn5bf\") pod \"3d165be1-162b-40cb-96fa-e66a846f3966\" (UID: \"3d165be1-162b-40cb-96fa-e66a846f3966\") "
Dec 04 17:53:05 crc kubenswrapper[4631]: I1204 17:53:05.552886 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d165be1-162b-40cb-96fa-e66a846f3966-config-data\") pod \"3d165be1-162b-40cb-96fa-e66a846f3966\" (UID: \"3d165be1-162b-40cb-96fa-e66a846f3966\") "
Dec 04 17:53:05 crc kubenswrapper[4631]: I1204 17:53:05.558638 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d165be1-162b-40cb-96fa-e66a846f3966-kube-api-access-fn5bf" (OuterVolumeSpecName: "kube-api-access-fn5bf") pod "3d165be1-162b-40cb-96fa-e66a846f3966" (UID: "3d165be1-162b-40cb-96fa-e66a846f3966"). InnerVolumeSpecName "kube-api-access-fn5bf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 17:53:05 crc kubenswrapper[4631]: I1204 17:53:05.579822 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d165be1-162b-40cb-96fa-e66a846f3966-config-data" (OuterVolumeSpecName: "config-data") pod "3d165be1-162b-40cb-96fa-e66a846f3966" (UID: "3d165be1-162b-40cb-96fa-e66a846f3966"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:53:05 crc kubenswrapper[4631]: I1204 17:53:05.585937 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d165be1-162b-40cb-96fa-e66a846f3966-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3d165be1-162b-40cb-96fa-e66a846f3966" (UID: "3d165be1-162b-40cb-96fa-e66a846f3966"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:53:05 crc kubenswrapper[4631]: I1204 17:53:05.655217 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d165be1-162b-40cb-96fa-e66a846f3966-config-data\") on node \"crc\" DevicePath \"\""
Dec 04 17:53:05 crc kubenswrapper[4631]: I1204 17:53:05.655246 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d165be1-162b-40cb-96fa-e66a846f3966-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 04 17:53:05 crc kubenswrapper[4631]: I1204 17:53:05.655258 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fn5bf\" (UniqueName: \"kubernetes.io/projected/3d165be1-162b-40cb-96fa-e66a846f3966-kube-api-access-fn5bf\") on node \"crc\" DevicePath \"\""
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.023656 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.023704 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.301710 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"3d165be1-162b-40cb-96fa-e66a846f3966","Type":"ContainerDied","Data":"3077b6d9d55ca794c2dcbeca87b4d9c7cec4eda9e2bf0328c6c175a57f661def"}
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.302518 4631 scope.go:117] "RemoveContainer" containerID="0bcf739528cb8577d5dfa862bb17d9663c270407516495f20bba85a49137237b"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.302784 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.342602 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.353710 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.363502 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 04 17:53:06 crc kubenswrapper[4631]: E1204 17:53:06.364166 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerName="horizon"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.364274 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerName="horizon"
Dec 04 17:53:06 crc kubenswrapper[4631]: E1204 17:53:06.364357 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d165be1-162b-40cb-96fa-e66a846f3966" containerName="nova-cell0-conductor-conductor"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.364440 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d165be1-162b-40cb-96fa-e66a846f3966" containerName="nova-cell0-conductor-conductor"
Dec 04 17:53:06 crc kubenswrapper[4631]: E1204 17:53:06.364519 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerName="horizon-log"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.364601 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerName="horizon-log"
Dec 04 17:53:06 crc kubenswrapper[4631]: E1204 17:53:06.364681 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerName="horizon"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.364747 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerName="horizon"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.365041 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerName="horizon-log"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.365123 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerName="horizon"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.365208 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d165be1-162b-40cb-96fa-e66a846f3966" containerName="nova-cell0-conductor-conductor"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.365289 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerName="horizon"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.366085 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.372622 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.372704 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.373440 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-99lk4"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.504319 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe9c3ab8-326e-49a0-8fe3-b54c15c89051-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"fe9c3ab8-326e-49a0-8fe3-b54c15c89051\") " pod="openstack/nova-cell0-conductor-0"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.504391 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe9c3ab8-326e-49a0-8fe3-b54c15c89051-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"fe9c3ab8-326e-49a0-8fe3-b54c15c89051\") " pod="openstack/nova-cell0-conductor-0"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.504440 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wscjs\" (UniqueName: \"kubernetes.io/projected/fe9c3ab8-326e-49a0-8fe3-b54c15c89051-kube-api-access-wscjs\") pod \"nova-cell0-conductor-0\" (UID: \"fe9c3ab8-326e-49a0-8fe3-b54c15c89051\") " pod="openstack/nova-cell0-conductor-0"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.606174 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe9c3ab8-326e-49a0-8fe3-b54c15c89051-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"fe9c3ab8-326e-49a0-8fe3-b54c15c89051\") " pod="openstack/nova-cell0-conductor-0"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.606222 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe9c3ab8-326e-49a0-8fe3-b54c15c89051-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"fe9c3ab8-326e-49a0-8fe3-b54c15c89051\") " pod="openstack/nova-cell0-conductor-0"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.606264 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wscjs\" (UniqueName: \"kubernetes.io/projected/fe9c3ab8-326e-49a0-8fe3-b54c15c89051-kube-api-access-wscjs\") pod \"nova-cell0-conductor-0\" (UID: \"fe9c3ab8-326e-49a0-8fe3-b54c15c89051\") " pod="openstack/nova-cell0-conductor-0"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.613340 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe9c3ab8-326e-49a0-8fe3-b54c15c89051-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"fe9c3ab8-326e-49a0-8fe3-b54c15c89051\") " pod="openstack/nova-cell0-conductor-0"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.620109 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe9c3ab8-326e-49a0-8fe3-b54c15c89051-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"fe9c3ab8-326e-49a0-8fe3-b54c15c89051\") " pod="openstack/nova-cell0-conductor-0"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.623723 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wscjs\" (UniqueName: \"kubernetes.io/projected/fe9c3ab8-326e-49a0-8fe3-b54c15c89051-kube-api-access-wscjs\") pod \"nova-cell0-conductor-0\" (UID: \"fe9c3ab8-326e-49a0-8fe3-b54c15c89051\") " pod="openstack/nova-cell0-conductor-0"
Dec 04 17:53:06 crc kubenswrapper[4631]: I1204 17:53:06.691028 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Dec 04 17:53:07 crc kubenswrapper[4631]: I1204 17:53:07.149269 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 04 17:53:07 crc kubenswrapper[4631]: W1204 17:53:07.163903 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe9c3ab8_326e_49a0_8fe3_b54c15c89051.slice/crio-5397b91773fca31578e287542b22b7a0ffcb37c8744b7b0690413bc5550dad84 WatchSource:0}: Error finding container 5397b91773fca31578e287542b22b7a0ffcb37c8744b7b0690413bc5550dad84: Status 404 returned error can't find the container with id 5397b91773fca31578e287542b22b7a0ffcb37c8744b7b0690413bc5550dad84
Dec 04 17:53:07 crc kubenswrapper[4631]: I1204 17:53:07.311274 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"fe9c3ab8-326e-49a0-8fe3-b54c15c89051","Type":"ContainerStarted","Data":"5397b91773fca31578e287542b22b7a0ffcb37c8744b7b0690413bc5550dad84"}
Dec 04 17:53:07 crc kubenswrapper[4631]: I1204 17:53:07.891320 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Dec 04 17:53:08 crc kubenswrapper[4631]: I1204 17:53:08.250755 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d165be1-162b-40cb-96fa-e66a846f3966" path="/var/lib/kubelet/pods/3d165be1-162b-40cb-96fa-e66a846f3966/volumes"
Dec 04 17:53:08 crc kubenswrapper[4631]: I1204 17:53:08.321791 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"fe9c3ab8-326e-49a0-8fe3-b54c15c89051","Type":"ContainerStarted","Data":"7725d3eca21d6fca3f0ebfaf8c6aa6e9c13efcf26c5eed890b7b43518c8d327e"}
Dec 04 17:53:08 crc kubenswrapper[4631]: I1204 17:53:08.321916 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Dec 04 17:53:08 crc kubenswrapper[4631]: I1204 17:53:08.344701 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.344685584 podStartE2EDuration="2.344685584s" podCreationTimestamp="2025-12-04 17:53:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:53:08.337831938 +0000 UTC m=+1518.370073926" watchObservedRunningTime="2025-12-04 17:53:08.344685584 +0000 UTC m=+1518.376927582"
Dec 04 17:53:16 crc kubenswrapper[4631]: I1204 17:53:16.732275 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.192489 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-6c257"]
Dec 04 17:53:17 crc kubenswrapper[4631]: E1204 17:53:17.193146 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerName="horizon"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.193161 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerName="horizon"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.193393 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="a675d52a-03e9-46e8-8b51-4e7f378179cf" containerName="horizon"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.193972 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6c257"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.197128 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.198156 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.207642 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-6c257"]
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.300698 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/924a7eb5-2c29-49e5-8a1d-60525deff185-config-data\") pod \"nova-cell0-cell-mapping-6c257\" (UID: \"924a7eb5-2c29-49e5-8a1d-60525deff185\") " pod="openstack/nova-cell0-cell-mapping-6c257"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.300793 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/924a7eb5-2c29-49e5-8a1d-60525deff185-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-6c257\" (UID: \"924a7eb5-2c29-49e5-8a1d-60525deff185\") " pod="openstack/nova-cell0-cell-mapping-6c257"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.300827 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzw69\" (UniqueName: \"kubernetes.io/projected/924a7eb5-2c29-49e5-8a1d-60525deff185-kube-api-access-dzw69\") pod \"nova-cell0-cell-mapping-6c257\" (UID: \"924a7eb5-2c29-49e5-8a1d-60525deff185\") " pod="openstack/nova-cell0-cell-mapping-6c257"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.300883 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/924a7eb5-2c29-49e5-8a1d-60525deff185-scripts\") pod \"nova-cell0-cell-mapping-6c257\" (UID: \"924a7eb5-2c29-49e5-8a1d-60525deff185\") " pod="openstack/nova-cell0-cell-mapping-6c257"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.384327 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.385828 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.396612 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.402199 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/924a7eb5-2c29-49e5-8a1d-60525deff185-scripts\") pod \"nova-cell0-cell-mapping-6c257\" (UID: \"924a7eb5-2c29-49e5-8a1d-60525deff185\") " pod="openstack/nova-cell0-cell-mapping-6c257"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.402295 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/924a7eb5-2c29-49e5-8a1d-60525deff185-config-data\") pod \"nova-cell0-cell-mapping-6c257\" (UID: \"924a7eb5-2c29-49e5-8a1d-60525deff185\") " pod="openstack/nova-cell0-cell-mapping-6c257"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.402345 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/924a7eb5-2c29-49e5-8a1d-60525deff185-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-6c257\" (UID: \"924a7eb5-2c29-49e5-8a1d-60525deff185\") " pod="openstack/nova-cell0-cell-mapping-6c257"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.402403 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzw69\" (UniqueName: \"kubernetes.io/projected/924a7eb5-2c29-49e5-8a1d-60525deff185-kube-api-access-dzw69\") pod \"nova-cell0-cell-mapping-6c257\" (UID: \"924a7eb5-2c29-49e5-8a1d-60525deff185\") " pod="openstack/nova-cell0-cell-mapping-6c257"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.414094 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/924a7eb5-2c29-49e5-8a1d-60525deff185-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-6c257\" (UID: \"924a7eb5-2c29-49e5-8a1d-60525deff185\") " pod="openstack/nova-cell0-cell-mapping-6c257"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.416387 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/924a7eb5-2c29-49e5-8a1d-60525deff185-config-data\") pod \"nova-cell0-cell-mapping-6c257\" (UID: \"924a7eb5-2c29-49e5-8a1d-60525deff185\") " pod="openstack/nova-cell0-cell-mapping-6c257"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.416662 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.417077 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/924a7eb5-2c29-49e5-8a1d-60525deff185-scripts\") pod \"nova-cell0-cell-mapping-6c257\" (UID: \"924a7eb5-2c29-49e5-8a1d-60525deff185\") " pod="openstack/nova-cell0-cell-mapping-6c257"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.452929 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzw69\" (UniqueName: \"kubernetes.io/projected/924a7eb5-2c29-49e5-8a1d-60525deff185-kube-api-access-dzw69\") pod \"nova-cell0-cell-mapping-6c257\" (UID: \"924a7eb5-2c29-49e5-8a1d-60525deff185\") " pod="openstack/nova-cell0-cell-mapping-6c257"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.503598 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-logs\") pod \"nova-api-0\" (UID: \"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5\") " pod="openstack/nova-api-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.503951 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxp2n\" (UniqueName: \"kubernetes.io/projected/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-kube-api-access-zxp2n\") pod \"nova-api-0\" (UID: \"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5\") " pod="openstack/nova-api-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.504049 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5\") " pod="openstack/nova-api-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.504121 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-config-data\") pod \"nova-api-0\" (UID: \"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5\") " pod="openstack/nova-api-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.520434 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6c257"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.554132 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.555508 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.565739 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.581206 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.608463 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxp2n\" (UniqueName: \"kubernetes.io/projected/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-kube-api-access-zxp2n\") pod \"nova-api-0\" (UID: \"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5\") " pod="openstack/nova-api-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.608533 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5\") " pod="openstack/nova-api-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.608554 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-config-data\") pod \"nova-api-0\" (UID: \"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5\") " pod="openstack/nova-api-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.608614 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-logs\") pod \"nova-api-0\" (UID: \"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5\") " pod="openstack/nova-api-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.609622 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-logs\") pod \"nova-api-0\" (UID: \"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5\") " pod="openstack/nova-api-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.618150 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-config-data\") pod \"nova-api-0\" (UID: \"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5\") " pod="openstack/nova-api-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.641076 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5\") " pod="openstack/nova-api-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.673640 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxp2n\" (UniqueName: \"kubernetes.io/projected/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-kube-api-access-zxp2n\") pod \"nova-api-0\" (UID: \"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5\") " pod="openstack/nova-api-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.721391 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5jb9\" (UniqueName: \"kubernetes.io/projected/d42596bc-d4b2-4347-93a2-5b36267cf451-kube-api-access-b5jb9\") pod \"nova-scheduler-0\" (UID: \"d42596bc-d4b2-4347-93a2-5b36267cf451\") " pod="openstack/nova-scheduler-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.721666 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d42596bc-d4b2-4347-93a2-5b36267cf451-config-data\") pod \"nova-scheduler-0\" (UID: \"d42596bc-d4b2-4347-93a2-5b36267cf451\") " pod="openstack/nova-scheduler-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.721756 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d42596bc-d4b2-4347-93a2-5b36267cf451-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d42596bc-d4b2-4347-93a2-5b36267cf451\") " pod="openstack/nova-scheduler-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.740303 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.741483 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.753734 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.786189 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.787737 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.794837 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.818573 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.835582 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5jb9\" (UniqueName: \"kubernetes.io/projected/d42596bc-d4b2-4347-93a2-5b36267cf451-kube-api-access-b5jb9\") pod \"nova-scheduler-0\" (UID: \"d42596bc-d4b2-4347-93a2-5b36267cf451\") " pod="openstack/nova-scheduler-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.835837 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d42596bc-d4b2-4347-93a2-5b36267cf451-config-data\") pod \"nova-scheduler-0\" (UID: \"d42596bc-d4b2-4347-93a2-5b36267cf451\") " pod="openstack/nova-scheduler-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.835914 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d42596bc-d4b2-4347-93a2-5b36267cf451-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d42596bc-d4b2-4347-93a2-5b36267cf451\") " pod="openstack/nova-scheduler-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.836032 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6tfs\" (UniqueName: \"kubernetes.io/projected/32ae7f6a-5058-401a-b9bb-f0e6cd188783-kube-api-access-n6tfs\") pod \"nova-cell1-novncproxy-0\" (UID: \"32ae7f6a-5058-401a-b9bb-f0e6cd188783\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.836140 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/518c8bf5-91ae-486a-b01c-c9ab8c70a272-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"518c8bf5-91ae-486a-b01c-c9ab8c70a272\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.836214 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/518c8bf5-91ae-486a-b01c-c9ab8c70a272-logs\") pod \"nova-metadata-0\" (UID: \"518c8bf5-91ae-486a-b01c-c9ab8c70a272\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.836285 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjdk9\" (UniqueName: \"kubernetes.io/projected/518c8bf5-91ae-486a-b01c-c9ab8c70a272-kube-api-access-vjdk9\") pod \"nova-metadata-0\" (UID: \"518c8bf5-91ae-486a-b01c-c9ab8c70a272\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.836382 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/518c8bf5-91ae-486a-b01c-c9ab8c70a272-config-data\") pod \"nova-metadata-0\" (UID: \"518c8bf5-91ae-486a-b01c-c9ab8c70a272\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.836457 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32ae7f6a-5058-401a-b9bb-f0e6cd188783-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"32ae7f6a-5058-401a-b9bb-f0e6cd188783\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.836539 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32ae7f6a-5058-401a-b9bb-f0e6cd188783-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"32ae7f6a-5058-401a-b9bb-f0e6cd188783\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.843056 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d42596bc-d4b2-4347-93a2-5b36267cf451-config-data\") pod \"nova-scheduler-0\" (UID: \"d42596bc-d4b2-4347-93a2-5b36267cf451\") " pod="openstack/nova-scheduler-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.843651 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.848270 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d42596bc-d4b2-4347-93a2-5b36267cf451-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d42596bc-d4b2-4347-93a2-5b36267cf451\") " pod="openstack/nova-scheduler-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.878754 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.880429 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5jb9\" (UniqueName: \"kubernetes.io/projected/d42596bc-d4b2-4347-93a2-5b36267cf451-kube-api-access-b5jb9\") pod \"nova-scheduler-0\" (UID: \"d42596bc-d4b2-4347-93a2-5b36267cf451\") " pod="openstack/nova-scheduler-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.940473 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6tfs\" (UniqueName: \"kubernetes.io/projected/32ae7f6a-5058-401a-b9bb-f0e6cd188783-kube-api-access-n6tfs\") pod \"nova-cell1-novncproxy-0\" (UID: \"32ae7f6a-5058-401a-b9bb-f0e6cd188783\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.940552 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/518c8bf5-91ae-486a-b01c-c9ab8c70a272-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"518c8bf5-91ae-486a-b01c-c9ab8c70a272\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.940574 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/518c8bf5-91ae-486a-b01c-c9ab8c70a272-logs\") pod \"nova-metadata-0\" (UID: \"518c8bf5-91ae-486a-b01c-c9ab8c70a272\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.940591 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjdk9\" (UniqueName: \"kubernetes.io/projected/518c8bf5-91ae-486a-b01c-c9ab8c70a272-kube-api-access-vjdk9\") pod \"nova-metadata-0\" (UID: \"518c8bf5-91ae-486a-b01c-c9ab8c70a272\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.940614 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/518c8bf5-91ae-486a-b01c-c9ab8c70a272-config-data\") pod \"nova-metadata-0\" (UID: \"518c8bf5-91ae-486a-b01c-c9ab8c70a272\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.940629 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32ae7f6a-5058-401a-b9bb-f0e6cd188783-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"32ae7f6a-5058-401a-b9bb-f0e6cd188783\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.940652 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32ae7f6a-5058-401a-b9bb-f0e6cd188783-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"32ae7f6a-5058-401a-b9bb-f0e6cd188783\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.942902 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/518c8bf5-91ae-486a-b01c-c9ab8c70a272-logs\") pod \"nova-metadata-0\" (UID: \"518c8bf5-91ae-486a-b01c-c9ab8c70a272\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.953045 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32ae7f6a-5058-401a-b9bb-f0e6cd188783-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"32ae7f6a-5058-401a-b9bb-f0e6cd188783\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.956704 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/518c8bf5-91ae-486a-b01c-c9ab8c70a272-config-data\") pod \"nova-metadata-0\" (UID: \"518c8bf5-91ae-486a-b01c-c9ab8c70a272\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.958885 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32ae7f6a-5058-401a-b9bb-f0e6cd188783-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"32ae7f6a-5058-401a-b9bb-f0e6cd188783\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.972785 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.977037 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/518c8bf5-91ae-486a-b01c-c9ab8c70a272-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"518c8bf5-91ae-486a-b01c-c9ab8c70a272\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.994110 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjdk9\" (UniqueName: \"kubernetes.io/projected/518c8bf5-91ae-486a-b01c-c9ab8c70a272-kube-api-access-vjdk9\") pod \"nova-metadata-0\" (UID: \"518c8bf5-91ae-486a-b01c-c9ab8c70a272\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.994192 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-tcd4d"]
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.995021 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6tfs\" (UniqueName: \"kubernetes.io/projected/32ae7f6a-5058-401a-b9bb-f0e6cd188783-kube-api-access-n6tfs\") pod \"nova-cell1-novncproxy-0\" (UID: \"32ae7f6a-5058-401a-b9bb-f0e6cd188783\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 04 17:53:17 crc kubenswrapper[4631]: I1204 17:53:17.995686 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d"
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.003448 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-tcd4d"]
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.101838 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.117113 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.145836 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-tcd4d\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d"
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.146104 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-tcd4d\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d"
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.146213 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-tcd4d\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d"
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.146281 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-config\") pod \"dnsmasq-dns-845d6d6f59-tcd4d\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d"
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.146408 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zccmx\" (UniqueName: \"kubernetes.io/projected/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-kube-api-access-zccmx\") pod \"dnsmasq-dns-845d6d6f59-tcd4d\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d"
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.146503 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-tcd4d\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d"
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.248003 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-tcd4d\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d"
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.248078 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-tcd4d\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d"
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.248134 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-tcd4d\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d"
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.248161 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-config\") pod \"dnsmasq-dns-845d6d6f59-tcd4d\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d"
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.248212 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zccmx\" (UniqueName: \"kubernetes.io/projected/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-kube-api-access-zccmx\") pod \"dnsmasq-dns-845d6d6f59-tcd4d\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d"
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.248244 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-tcd4d\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d"
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.249785 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-tcd4d\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d"
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.249855 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-tcd4d\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d"
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.250027 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-tcd4d\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d"
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.251192 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-tcd4d\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d"
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.252137 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-config\") pod \"dnsmasq-dns-845d6d6f59-tcd4d\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d"
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.275995 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zccmx\" (UniqueName: \"kubernetes.io/projected/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-kube-api-access-zccmx\") pod \"dnsmasq-dns-845d6d6f59-tcd4d\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d"
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.340677 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d"
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.518698 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-6c257"]
Dec 04 17:53:18 crc kubenswrapper[4631]: W1204 17:53:18.543590 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod924a7eb5_2c29_49e5_8a1d_60525deff185.slice/crio-f0376d3a289582ac075518cac59bfa3fffa1fa4c01d537d9880c8384977cdc74 WatchSource:0}: Error finding container f0376d3a289582ac075518cac59bfa3fffa1fa4c01d537d9880c8384977cdc74: Status 404 returned error can't find the container with id f0376d3a289582ac075518cac59bfa3fffa1fa4c01d537d9880c8384977cdc74
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.701515 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 04 17:53:18 crc kubenswrapper[4631]: I1204 17:53:18.724010 4631 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.152058 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.211618 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.217210 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-tcd4d"]
Dec 04 17:53:19 crc kubenswrapper[4631]: W1204 17:53:19.231319 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32ae7f6a_5058_401a_b9bb_f0e6cd188783.slice/crio-e526f1ea9c506f3169c20cc31b557a74920bfcc61f9add6f99b48650bef7c1c3 WatchSource:0}: Error finding container e526f1ea9c506f3169c20cc31b557a74920bfcc61f9add6f99b48650bef7c1c3: Status 404 returned error can't find the container with id e526f1ea9c506f3169c20cc31b557a74920bfcc61f9add6f99b48650bef7c1c3
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.238945 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.254030 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-92k9c"]
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.258192 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-92k9c"
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.264196 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.264435 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts"
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.271612 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-92k9c"]
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.393074 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0848a6da-7687-450b-a5c0-f64a5c0ee32e-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-92k9c\" (UID: \"0848a6da-7687-450b-a5c0-f64a5c0ee32e\") " pod="openstack/nova-cell1-conductor-db-sync-92k9c"
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.394188 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0848a6da-7687-450b-a5c0-f64a5c0ee32e-config-data\") pod \"nova-cell1-conductor-db-sync-92k9c\" (UID: \"0848a6da-7687-450b-a5c0-f64a5c0ee32e\") " pod="openstack/nova-cell1-conductor-db-sync-92k9c"
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.394275 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0848a6da-7687-450b-a5c0-f64a5c0ee32e-scripts\") pod \"nova-cell1-conductor-db-sync-92k9c\" (UID: \"0848a6da-7687-450b-a5c0-f64a5c0ee32e\") " pod="openstack/nova-cell1-conductor-db-sync-92k9c"
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.394715 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpxqg\" (UniqueName: \"kubernetes.io/projected/0848a6da-7687-450b-a5c0-f64a5c0ee32e-kube-api-access-bpxqg\") pod \"nova-cell1-conductor-db-sync-92k9c\" (UID: \"0848a6da-7687-450b-a5c0-f64a5c0ee32e\") " pod="openstack/nova-cell1-conductor-db-sync-92k9c"
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.458341 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"32ae7f6a-5058-401a-b9bb-f0e6cd188783","Type":"ContainerStarted","Data":"e526f1ea9c506f3169c20cc31b557a74920bfcc61f9add6f99b48650bef7c1c3"}
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.464823 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d" event={"ID":"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb","Type":"ContainerStarted","Data":"abebaa2f4046e51c9e193d31582a92889625e1a9072287929e08d11950b50347"}
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.467505 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d42596bc-d4b2-4347-93a2-5b36267cf451","Type":"ContainerStarted","Data":"033a40c2d10cfaf4511d49675e8e6e453ad37f8bbcc3166429007bb78a16c5c5"}
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.469252 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5","Type":"ContainerStarted","Data":"ccc6e5b9f197be5065258b1570b2b1654780afe2c1d854937acbe6ef6d55653c"}
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.478695 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"518c8bf5-91ae-486a-b01c-c9ab8c70a272","Type":"ContainerStarted","Data":"a5481bfd29a40ad4b7eccd3b33eaed47a0de2c759254f690cfcffff53a5dfc23"}
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.491752 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6c257" event={"ID":"924a7eb5-2c29-49e5-8a1d-60525deff185","Type":"ContainerStarted","Data":"53bf6f1989ff93e21110faf35f9bc52c6d6555fb2a1fe47c841ac363ba02d3a4"}
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.491790 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6c257" event={"ID":"924a7eb5-2c29-49e5-8a1d-60525deff185","Type":"ContainerStarted","Data":"f0376d3a289582ac075518cac59bfa3fffa1fa4c01d537d9880c8384977cdc74"}
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.496885 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0848a6da-7687-450b-a5c0-f64a5c0ee32e-scripts\") pod \"nova-cell1-conductor-db-sync-92k9c\" (UID: \"0848a6da-7687-450b-a5c0-f64a5c0ee32e\") " pod="openstack/nova-cell1-conductor-db-sync-92k9c"
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.497062 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpxqg\" (UniqueName: \"kubernetes.io/projected/0848a6da-7687-450b-a5c0-f64a5c0ee32e-kube-api-access-bpxqg\") pod \"nova-cell1-conductor-db-sync-92k9c\" (UID: \"0848a6da-7687-450b-a5c0-f64a5c0ee32e\") " pod="openstack/nova-cell1-conductor-db-sync-92k9c"
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.497120 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0848a6da-7687-450b-a5c0-f64a5c0ee32e-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-92k9c\" (UID: \"0848a6da-7687-450b-a5c0-f64a5c0ee32e\") " pod="openstack/nova-cell1-conductor-db-sync-92k9c"
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.497177 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0848a6da-7687-450b-a5c0-f64a5c0ee32e-config-data\") pod \"nova-cell1-conductor-db-sync-92k9c\" (UID: \"0848a6da-7687-450b-a5c0-f64a5c0ee32e\") " pod="openstack/nova-cell1-conductor-db-sync-92k9c"
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.505290 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0848a6da-7687-450b-a5c0-f64a5c0ee32e-config-data\") pod \"nova-cell1-conductor-db-sync-92k9c\" (UID: \"0848a6da-7687-450b-a5c0-f64a5c0ee32e\") " pod="openstack/nova-cell1-conductor-db-sync-92k9c"
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.512111 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0848a6da-7687-450b-a5c0-f64a5c0ee32e-scripts\") pod \"nova-cell1-conductor-db-sync-92k9c\" (UID: \"0848a6da-7687-450b-a5c0-f64a5c0ee32e\") " pod="openstack/nova-cell1-conductor-db-sync-92k9c"
Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.514393 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0848a6da-7687-450b-a5c0-f64a5c0ee32e-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-92k9c\" (UID:
\"0848a6da-7687-450b-a5c0-f64a5c0ee32e\") " pod="openstack/nova-cell1-conductor-db-sync-92k9c" Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.524924 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-6c257" podStartSLOduration=2.524897528 podStartE2EDuration="2.524897528s" podCreationTimestamp="2025-12-04 17:53:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:53:19.507821015 +0000 UTC m=+1529.540063013" watchObservedRunningTime="2025-12-04 17:53:19.524897528 +0000 UTC m=+1529.557139526" Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.530473 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpxqg\" (UniqueName: \"kubernetes.io/projected/0848a6da-7687-450b-a5c0-f64a5c0ee32e-kube-api-access-bpxqg\") pod \"nova-cell1-conductor-db-sync-92k9c\" (UID: \"0848a6da-7687-450b-a5c0-f64a5c0ee32e\") " pod="openstack/nova-cell1-conductor-db-sync-92k9c" Dec 04 17:53:19 crc kubenswrapper[4631]: I1204 17:53:19.584138 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-92k9c" Dec 04 17:53:20 crc kubenswrapper[4631]: I1204 17:53:20.116913 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-92k9c"] Dec 04 17:53:20 crc kubenswrapper[4631]: I1204 17:53:20.515174 4631 generic.go:334] "Generic (PLEG): container finished" podID="bc96d3f1-aae9-4aac-bef8-c745f17a1ffb" containerID="1f4e202f91c152fafe1f3d6a072fe5384da653e8742304ecbc24edef73f1933b" exitCode=0 Dec 04 17:53:20 crc kubenswrapper[4631]: I1204 17:53:20.515289 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d" event={"ID":"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb","Type":"ContainerDied","Data":"1f4e202f91c152fafe1f3d6a072fe5384da653e8742304ecbc24edef73f1933b"} Dec 04 17:53:20 crc kubenswrapper[4631]: I1204 17:53:20.523076 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-92k9c" event={"ID":"0848a6da-7687-450b-a5c0-f64a5c0ee32e","Type":"ContainerStarted","Data":"a6a319c92c0f49bf5b30b3c573e09fe9636e45b06eb24af03a08e50558039ac4"} Dec 04 17:53:21 crc kubenswrapper[4631]: I1204 17:53:21.380837 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 04 17:53:21 crc kubenswrapper[4631]: I1204 17:53:21.406415 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 17:53:21 crc kubenswrapper[4631]: I1204 17:53:21.531177 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-92k9c" event={"ID":"0848a6da-7687-450b-a5c0-f64a5c0ee32e","Type":"ContainerStarted","Data":"1effa5b1412a3fbf930a9c959f059909d6c4e0d94184100b87c360a6dc8e27f7"} Dec 04 17:53:21 crc kubenswrapper[4631]: I1204 17:53:21.547972 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-92k9c" podStartSLOduration=2.547954551 podStartE2EDuration="2.547954551s" podCreationTimestamp="2025-12-04 17:53:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:53:21.546039289 +0000 UTC m=+1531.578281287" watchObservedRunningTime="2025-12-04 17:53:21.547954551 +0000 UTC m=+1531.580196549" Dec 04 17:53:24 crc 
kubenswrapper[4631]: I1204 17:53:24.568501 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="32ae7f6a-5058-401a-b9bb-f0e6cd188783" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://0efe7561e67485a29034a29fe861bcad2eae3a701c3aa722abb9d3b034f4d9ac" gracePeriod=30 Dec 04 17:53:24 crc kubenswrapper[4631]: I1204 17:53:24.568665 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"32ae7f6a-5058-401a-b9bb-f0e6cd188783","Type":"ContainerStarted","Data":"0efe7561e67485a29034a29fe861bcad2eae3a701c3aa722abb9d3b034f4d9ac"} Dec 04 17:53:24 crc kubenswrapper[4631]: I1204 17:53:24.571441 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d" event={"ID":"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb","Type":"ContainerStarted","Data":"746d57cf5231329cd3a205b43e21fa72819c4012dd19093bf303f4acb6845144"} Dec 04 17:53:24 crc kubenswrapper[4631]: I1204 17:53:24.571979 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d" Dec 04 17:53:24 crc kubenswrapper[4631]: I1204 17:53:24.575353 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d42596bc-d4b2-4347-93a2-5b36267cf451","Type":"ContainerStarted","Data":"8e22b2b88bd870cab6d3e12efd52e59785b2e000eaf56829689c8731d6db1ca8"} Dec 04 17:53:24 crc kubenswrapper[4631]: I1204 17:53:24.579883 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5","Type":"ContainerStarted","Data":"fa7a82b1a0c4defc90f7904ac4ac9f22650422f8989750eca355e753193cf54c"} Dec 04 17:53:24 crc kubenswrapper[4631]: I1204 17:53:24.579939 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5","Type":"ContainerStarted","Data":"b97c1c06fec99bfbbf77a854761ab75c26d4afd5318b0d597b4cebd22e4dc4c3"} Dec 04 17:53:24 crc kubenswrapper[4631]: I1204 17:53:24.585886 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"518c8bf5-91ae-486a-b01c-c9ab8c70a272","Type":"ContainerStarted","Data":"c2d36cd9c0d529514bee8629228118b59a0010c45056e8c06e9a820ba6d2fa9e"} Dec 04 17:53:24 crc kubenswrapper[4631]: I1204 17:53:24.585932 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"518c8bf5-91ae-486a-b01c-c9ab8c70a272","Type":"ContainerStarted","Data":"cfb40f7f80875ab75e8dd5de120bdafda5e72c82de6badee61b1f01b64272add"} Dec 04 17:53:24 crc kubenswrapper[4631]: I1204 17:53:24.586046 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="518c8bf5-91ae-486a-b01c-c9ab8c70a272" containerName="nova-metadata-log" containerID="cri-o://cfb40f7f80875ab75e8dd5de120bdafda5e72c82de6badee61b1f01b64272add" gracePeriod=30 Dec 04 17:53:24 crc kubenswrapper[4631]: I1204 17:53:24.586264 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="518c8bf5-91ae-486a-b01c-c9ab8c70a272" containerName="nova-metadata-metadata" containerID="cri-o://c2d36cd9c0d529514bee8629228118b59a0010c45056e8c06e9a820ba6d2fa9e" gracePeriod=30 Dec 04 17:53:24 crc kubenswrapper[4631]: I1204 17:53:24.593024 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" 
podStartSLOduration=3.615257775 podStartE2EDuration="7.593000713s" podCreationTimestamp="2025-12-04 17:53:17 +0000 UTC" firstStartedPulling="2025-12-04 17:53:19.253576914 +0000 UTC m=+1529.285818912" lastFinishedPulling="2025-12-04 17:53:23.231319852 +0000 UTC m=+1533.263561850" observedRunningTime="2025-12-04 17:53:24.584978315 +0000 UTC m=+1534.617220323" watchObservedRunningTime="2025-12-04 17:53:24.593000713 +0000 UTC m=+1534.625242711" Dec 04 17:53:24 crc kubenswrapper[4631]: I1204 17:53:24.615555 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d" podStartSLOduration=7.615541275 podStartE2EDuration="7.615541275s" podCreationTimestamp="2025-12-04 17:53:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:53:24.605245695 +0000 UTC m=+1534.637487713" watchObservedRunningTime="2025-12-04 17:53:24.615541275 +0000 UTC m=+1534.647783273" Dec 04 17:53:24 crc kubenswrapper[4631]: I1204 17:53:24.630890 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.122562132 podStartE2EDuration="7.630872401s" podCreationTimestamp="2025-12-04 17:53:17 +0000 UTC" firstStartedPulling="2025-12-04 17:53:18.723741453 +0000 UTC m=+1528.755983451" lastFinishedPulling="2025-12-04 17:53:23.232051722 +0000 UTC m=+1533.264293720" observedRunningTime="2025-12-04 17:53:24.624317153 +0000 UTC m=+1534.656559151" watchObservedRunningTime="2025-12-04 17:53:24.630872401 +0000 UTC m=+1534.663114399" Dec 04 17:53:24 crc kubenswrapper[4631]: I1204 17:53:24.671148 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.733197635 podStartE2EDuration="7.671128103s" podCreationTimestamp="2025-12-04 17:53:17 +0000 UTC" firstStartedPulling="2025-12-04 17:53:19.293283851 +0000 UTC m=+1529.325525849" lastFinishedPulling="2025-12-04 17:53:23.231214319 +0000 UTC m=+1533.263456317" observedRunningTime="2025-12-04 17:53:24.667650699 +0000 UTC m=+1534.699892697" watchObservedRunningTime="2025-12-04 17:53:24.671128103 +0000 UTC m=+1534.703370101" Dec 04 17:53:24 crc kubenswrapper[4631]: I1204 17:53:24.674366 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.556308955 podStartE2EDuration="7.674358851s" podCreationTimestamp="2025-12-04 17:53:17 +0000 UTC" firstStartedPulling="2025-12-04 17:53:19.112713841 +0000 UTC m=+1529.144955839" lastFinishedPulling="2025-12-04 17:53:23.230763737 +0000 UTC m=+1533.263005735" observedRunningTime="2025-12-04 17:53:24.646931977 +0000 UTC m=+1534.679173985" watchObservedRunningTime="2025-12-04 17:53:24.674358851 +0000 UTC m=+1534.706600849" Dec 04 17:53:25 crc kubenswrapper[4631]: I1204 17:53:25.612378 4631 generic.go:334] "Generic (PLEG): container finished" podID="518c8bf5-91ae-486a-b01c-c9ab8c70a272" containerID="c2d36cd9c0d529514bee8629228118b59a0010c45056e8c06e9a820ba6d2fa9e" exitCode=0 Dec 04 17:53:25 crc kubenswrapper[4631]: I1204 17:53:25.613046 4631 generic.go:334] "Generic (PLEG): container finished" podID="518c8bf5-91ae-486a-b01c-c9ab8c70a272" containerID="cfb40f7f80875ab75e8dd5de120bdafda5e72c82de6badee61b1f01b64272add" exitCode=143 Dec 04 17:53:25 crc kubenswrapper[4631]: I1204 17:53:25.613795 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"518c8bf5-91ae-486a-b01c-c9ab8c70a272","Type":"ContainerDied","Data":"c2d36cd9c0d529514bee8629228118b59a0010c45056e8c06e9a820ba6d2fa9e"} Dec 04 17:53:25 crc kubenswrapper[4631]: I1204 17:53:25.613832 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"518c8bf5-91ae-486a-b01c-c9ab8c70a272","Type":"ContainerDied","Data":"cfb40f7f80875ab75e8dd5de120bdafda5e72c82de6badee61b1f01b64272add"} Dec 04 17:53:25 crc kubenswrapper[4631]: I1204 17:53:25.613844 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"518c8bf5-91ae-486a-b01c-c9ab8c70a272","Type":"ContainerDied","Data":"a5481bfd29a40ad4b7eccd3b33eaed47a0de2c759254f690cfcffff53a5dfc23"} Dec 04 17:53:25 crc kubenswrapper[4631]: I1204 17:53:25.613855 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a5481bfd29a40ad4b7eccd3b33eaed47a0de2c759254f690cfcffff53a5dfc23" Dec 04 17:53:25 crc kubenswrapper[4631]: I1204 17:53:25.613966 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 04 17:53:25 crc kubenswrapper[4631]: I1204 17:53:25.661007 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vjdk9\" (UniqueName: \"kubernetes.io/projected/518c8bf5-91ae-486a-b01c-c9ab8c70a272-kube-api-access-vjdk9\") pod \"518c8bf5-91ae-486a-b01c-c9ab8c70a272\" (UID: \"518c8bf5-91ae-486a-b01c-c9ab8c70a272\") " Dec 04 17:53:25 crc kubenswrapper[4631]: I1204 17:53:25.661114 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/518c8bf5-91ae-486a-b01c-c9ab8c70a272-config-data\") pod \"518c8bf5-91ae-486a-b01c-c9ab8c70a272\" (UID: \"518c8bf5-91ae-486a-b01c-c9ab8c70a272\") " Dec 04 17:53:25 crc kubenswrapper[4631]: I1204 17:53:25.661189 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/518c8bf5-91ae-486a-b01c-c9ab8c70a272-logs\") pod \"518c8bf5-91ae-486a-b01c-c9ab8c70a272\" (UID: \"518c8bf5-91ae-486a-b01c-c9ab8c70a272\") " Dec 04 17:53:25 crc kubenswrapper[4631]: I1204 17:53:25.661328 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/518c8bf5-91ae-486a-b01c-c9ab8c70a272-combined-ca-bundle\") pod \"518c8bf5-91ae-486a-b01c-c9ab8c70a272\" (UID: \"518c8bf5-91ae-486a-b01c-c9ab8c70a272\") " Dec 04 17:53:25 crc kubenswrapper[4631]: I1204 17:53:25.663113 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/518c8bf5-91ae-486a-b01c-c9ab8c70a272-logs" (OuterVolumeSpecName: "logs") pod "518c8bf5-91ae-486a-b01c-c9ab8c70a272" (UID: "518c8bf5-91ae-486a-b01c-c9ab8c70a272"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:53:25 crc kubenswrapper[4631]: I1204 17:53:25.676542 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/518c8bf5-91ae-486a-b01c-c9ab8c70a272-kube-api-access-vjdk9" (OuterVolumeSpecName: "kube-api-access-vjdk9") pod "518c8bf5-91ae-486a-b01c-c9ab8c70a272" (UID: "518c8bf5-91ae-486a-b01c-c9ab8c70a272"). InnerVolumeSpecName "kube-api-access-vjdk9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:53:25 crc kubenswrapper[4631]: I1204 17:53:25.700893 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/518c8bf5-91ae-486a-b01c-c9ab8c70a272-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "518c8bf5-91ae-486a-b01c-c9ab8c70a272" (UID: "518c8bf5-91ae-486a-b01c-c9ab8c70a272"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:53:25 crc kubenswrapper[4631]: I1204 17:53:25.731625 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/518c8bf5-91ae-486a-b01c-c9ab8c70a272-config-data" (OuterVolumeSpecName: "config-data") pod "518c8bf5-91ae-486a-b01c-c9ab8c70a272" (UID: "518c8bf5-91ae-486a-b01c-c9ab8c70a272"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:53:25 crc kubenswrapper[4631]: I1204 17:53:25.763188 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vjdk9\" (UniqueName: \"kubernetes.io/projected/518c8bf5-91ae-486a-b01c-c9ab8c70a272-kube-api-access-vjdk9\") on node \"crc\" DevicePath \"\"" Dec 04 17:53:25 crc kubenswrapper[4631]: I1204 17:53:25.763217 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/518c8bf5-91ae-486a-b01c-c9ab8c70a272-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:53:25 crc kubenswrapper[4631]: I1204 17:53:25.763227 4631 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/518c8bf5-91ae-486a-b01c-c9ab8c70a272-logs\") on node \"crc\" DevicePath \"\"" Dec 04 17:53:25 crc kubenswrapper[4631]: I1204 17:53:25.763302 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/518c8bf5-91ae-486a-b01c-c9ab8c70a272-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.620504 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.647486 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.657749 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.682642 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 04 17:53:26 crc kubenswrapper[4631]: E1204 17:53:26.683157 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="518c8bf5-91ae-486a-b01c-c9ab8c70a272" containerName="nova-metadata-metadata" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.683181 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="518c8bf5-91ae-486a-b01c-c9ab8c70a272" containerName="nova-metadata-metadata" Dec 04 17:53:26 crc kubenswrapper[4631]: E1204 17:53:26.683247 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="518c8bf5-91ae-486a-b01c-c9ab8c70a272" containerName="nova-metadata-log" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.683258 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="518c8bf5-91ae-486a-b01c-c9ab8c70a272" containerName="nova-metadata-log" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.683586 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="518c8bf5-91ae-486a-b01c-c9ab8c70a272" containerName="nova-metadata-metadata" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.683622 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="518c8bf5-91ae-486a-b01c-c9ab8c70a272" containerName="nova-metadata-log" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.684924 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.687994 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.688276 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.748864 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.784545 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f203673d-35bc-41d0-8cf1-f9573fe6bad4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\") " pod="openstack/nova-metadata-0" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.784651 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f203673d-35bc-41d0-8cf1-f9573fe6bad4-logs\") pod \"nova-metadata-0\" (UID: \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\") " pod="openstack/nova-metadata-0" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.784712 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dx99\" (UniqueName: \"kubernetes.io/projected/f203673d-35bc-41d0-8cf1-f9573fe6bad4-kube-api-access-2dx99\") pod \"nova-metadata-0\" (UID: \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\") " pod="openstack/nova-metadata-0" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.784737 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f203673d-35bc-41d0-8cf1-f9573fe6bad4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\") " pod="openstack/nova-metadata-0" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.784984 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f203673d-35bc-41d0-8cf1-f9573fe6bad4-config-data\") pod \"nova-metadata-0\" (UID: \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\") " pod="openstack/nova-metadata-0" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.886453 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f203673d-35bc-41d0-8cf1-f9573fe6bad4-config-data\") pod \"nova-metadata-0\" (UID: \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\") " pod="openstack/nova-metadata-0" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.886806 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f203673d-35bc-41d0-8cf1-f9573fe6bad4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\") " pod="openstack/nova-metadata-0" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.886913 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f203673d-35bc-41d0-8cf1-f9573fe6bad4-logs\") pod \"nova-metadata-0\" (UID: \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\") " pod="openstack/nova-metadata-0" Dec 04 
17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.887326 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f203673d-35bc-41d0-8cf1-f9573fe6bad4-logs\") pod \"nova-metadata-0\" (UID: \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\") " pod="openstack/nova-metadata-0" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.887954 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dx99\" (UniqueName: \"kubernetes.io/projected/f203673d-35bc-41d0-8cf1-f9573fe6bad4-kube-api-access-2dx99\") pod \"nova-metadata-0\" (UID: \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\") " pod="openstack/nova-metadata-0" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.888306 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f203673d-35bc-41d0-8cf1-f9573fe6bad4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\") " pod="openstack/nova-metadata-0" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.891799 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f203673d-35bc-41d0-8cf1-f9573fe6bad4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\") " pod="openstack/nova-metadata-0" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.892428 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f203673d-35bc-41d0-8cf1-f9573fe6bad4-config-data\") pod \"nova-metadata-0\" (UID: \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\") " pod="openstack/nova-metadata-0" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.893357 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f203673d-35bc-41d0-8cf1-f9573fe6bad4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\") " pod="openstack/nova-metadata-0" Dec 04 17:53:26 crc kubenswrapper[4631]: I1204 17:53:26.913913 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dx99\" (UniqueName: \"kubernetes.io/projected/f203673d-35bc-41d0-8cf1-f9573fe6bad4-kube-api-access-2dx99\") pod \"nova-metadata-0\" (UID: \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\") " pod="openstack/nova-metadata-0" Dec 04 17:53:27 crc kubenswrapper[4631]: I1204 17:53:27.034930 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 04 17:53:27 crc kubenswrapper[4631]: I1204 17:53:27.573629 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 17:53:27 crc kubenswrapper[4631]: W1204 17:53:27.598937 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf203673d_35bc_41d0_8cf1_f9573fe6bad4.slice/crio-1a246cc7e688028eba601aa73d88e3d2b953d932f9d7b59ea0fd35de5f9dcce7 WatchSource:0}: Error finding container 1a246cc7e688028eba601aa73d88e3d2b953d932f9d7b59ea0fd35de5f9dcce7: Status 404 returned error can't find the container with id 1a246cc7e688028eba601aa73d88e3d2b953d932f9d7b59ea0fd35de5f9dcce7 Dec 04 17:53:27 crc kubenswrapper[4631]: I1204 17:53:27.629250 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f203673d-35bc-41d0-8cf1-f9573fe6bad4","Type":"ContainerStarted","Data":"1a246cc7e688028eba601aa73d88e3d2b953d932f9d7b59ea0fd35de5f9dcce7"} Dec 04 17:53:27 crc kubenswrapper[4631]: I1204 17:53:27.847694 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 04 17:53:27 crc kubenswrapper[4631]: I1204 17:53:27.848191 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 04 17:53:27 crc kubenswrapper[4631]: I1204 17:53:27.973021 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 04 17:53:27 crc kubenswrapper[4631]: I1204 17:53:27.973924 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 04 17:53:28 crc kubenswrapper[4631]: I1204 17:53:28.001960 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 04 17:53:28 crc kubenswrapper[4631]: I1204 17:53:28.102940 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:53:28 crc kubenswrapper[4631]: I1204 17:53:28.259414 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="518c8bf5-91ae-486a-b01c-c9ab8c70a272" path="/var/lib/kubelet/pods/518c8bf5-91ae-486a-b01c-c9ab8c70a272/volumes" Dec 04 17:53:28 crc kubenswrapper[4631]: I1204 17:53:28.343105 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d" Dec 04 17:53:28 crc kubenswrapper[4631]: I1204 17:53:28.426289 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-jkhnk"] Dec 04 17:53:28 crc kubenswrapper[4631]: I1204 17:53:28.426507 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" podUID="ffcbb49b-28de-482a-a1c7-6d055f0cbf52" containerName="dnsmasq-dns" containerID="cri-o://d719c65490dae9465c3b7c17d44844548bfbac04f60cb4fbf9f84111308b1a94" gracePeriod=10 Dec 04 17:53:28 crc kubenswrapper[4631]: I1204 17:53:28.662210 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f203673d-35bc-41d0-8cf1-f9573fe6bad4","Type":"ContainerStarted","Data":"f1ba7214cbf227079682f40107e92a05250ab6a87aec6b252e9086426c11b6a0"} Dec 04 17:53:28 crc kubenswrapper[4631]: I1204 17:53:28.662263 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"f203673d-35bc-41d0-8cf1-f9573fe6bad4","Type":"ContainerStarted","Data":"0f6138524d20f0cbb607b4f4280ddb57a25c4bc53ba4d475b81d6e8e0e8b859e"} Dec 04 17:53:28 crc kubenswrapper[4631]: I1204 17:53:28.678536 4631 generic.go:334] "Generic (PLEG): container finished" podID="ffcbb49b-28de-482a-a1c7-6d055f0cbf52" containerID="d719c65490dae9465c3b7c17d44844548bfbac04f60cb4fbf9f84111308b1a94" exitCode=0 Dec 04 17:53:28 crc kubenswrapper[4631]: I1204 17:53:28.678582 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" event={"ID":"ffcbb49b-28de-482a-a1c7-6d055f0cbf52","Type":"ContainerDied","Data":"d719c65490dae9465c3b7c17d44844548bfbac04f60cb4fbf9f84111308b1a94"} Dec 04 17:53:28 crc kubenswrapper[4631]: I1204 17:53:28.730972 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 04 17:53:28 crc kubenswrapper[4631]: I1204 17:53:28.769599 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.769579388 podStartE2EDuration="2.769579388s" podCreationTimestamp="2025-12-04 17:53:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:53:28.712978622 +0000 UTC m=+1538.745220620" watchObservedRunningTime="2025-12-04 17:53:28.769579388 +0000 UTC m=+1538.801821386" Dec 04 17:53:28 crc kubenswrapper[4631]: I1204 17:53:28.939592 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.186:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 04 17:53:28 crc kubenswrapper[4631]: I1204 17:53:28.939883 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.186:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.084093 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.145462 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-dns-swift-storage-0\") pod \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.145551 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-config\") pod \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.145613 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-ovsdbserver-sb\") pod \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.145640 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-ovsdbserver-nb\") pod \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.145663 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6v96b\" (UniqueName: \"kubernetes.io/projected/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-kube-api-access-6v96b\") pod \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.145722 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-dns-svc\") pod \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\" (UID: \"ffcbb49b-28de-482a-a1c7-6d055f0cbf52\") " Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.152088 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-kube-api-access-6v96b" (OuterVolumeSpecName: "kube-api-access-6v96b") pod "ffcbb49b-28de-482a-a1c7-6d055f0cbf52" (UID: "ffcbb49b-28de-482a-a1c7-6d055f0cbf52"). InnerVolumeSpecName "kube-api-access-6v96b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.250558 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6v96b\" (UniqueName: \"kubernetes.io/projected/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-kube-api-access-6v96b\") on node \"crc\" DevicePath \"\"" Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.256064 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-config" (OuterVolumeSpecName: "config") pod "ffcbb49b-28de-482a-a1c7-6d055f0cbf52" (UID: "ffcbb49b-28de-482a-a1c7-6d055f0cbf52"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.265187 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ffcbb49b-28de-482a-a1c7-6d055f0cbf52" (UID: "ffcbb49b-28de-482a-a1c7-6d055f0cbf52"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.272115 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ffcbb49b-28de-482a-a1c7-6d055f0cbf52" (UID: "ffcbb49b-28de-482a-a1c7-6d055f0cbf52"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.279859 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ffcbb49b-28de-482a-a1c7-6d055f0cbf52" (UID: "ffcbb49b-28de-482a-a1c7-6d055f0cbf52"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.313824 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ffcbb49b-28de-482a-a1c7-6d055f0cbf52" (UID: "ffcbb49b-28de-482a-a1c7-6d055f0cbf52"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.351816 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.351845 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.351856 4631 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.351865 4631 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.351874 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffcbb49b-28de-482a-a1c7-6d055f0cbf52-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.689870 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" event={"ID":"ffcbb49b-28de-482a-a1c7-6d055f0cbf52","Type":"ContainerDied","Data":"c94786d9ad8e5e993397a38e6961147cac0448a8ca02c467d9d998f27aebe538"} Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.689920 4631 scope.go:117] "RemoveContainer" 
containerID="d719c65490dae9465c3b7c17d44844548bfbac04f60cb4fbf9f84111308b1a94" Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.689893 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-jkhnk" Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.717294 4631 scope.go:117] "RemoveContainer" containerID="5c7048f2af7e5427bc8d6bf5212421b3cd85be6b8b56dd36e18d7584e6715121" Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.725959 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-jkhnk"] Dec 04 17:53:29 crc kubenswrapper[4631]: I1204 17:53:29.733387 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-jkhnk"] Dec 04 17:53:30 crc kubenswrapper[4631]: I1204 17:53:30.251809 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffcbb49b-28de-482a-a1c7-6d055f0cbf52" path="/var/lib/kubelet/pods/ffcbb49b-28de-482a-a1c7-6d055f0cbf52/volumes" Dec 04 17:53:30 crc kubenswrapper[4631]: I1204 17:53:30.705416 4631 generic.go:334] "Generic (PLEG): container finished" podID="924a7eb5-2c29-49e5-8a1d-60525deff185" containerID="53bf6f1989ff93e21110faf35f9bc52c6d6555fb2a1fe47c841ac363ba02d3a4" exitCode=0 Dec 04 17:53:30 crc kubenswrapper[4631]: I1204 17:53:30.705788 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6c257" event={"ID":"924a7eb5-2c29-49e5-8a1d-60525deff185","Type":"ContainerDied","Data":"53bf6f1989ff93e21110faf35f9bc52c6d6555fb2a1fe47c841ac363ba02d3a4"} Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.037217 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.037560 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.094654 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6c257" Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.208550 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/924a7eb5-2c29-49e5-8a1d-60525deff185-config-data\") pod \"924a7eb5-2c29-49e5-8a1d-60525deff185\" (UID: \"924a7eb5-2c29-49e5-8a1d-60525deff185\") " Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.208608 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/924a7eb5-2c29-49e5-8a1d-60525deff185-scripts\") pod \"924a7eb5-2c29-49e5-8a1d-60525deff185\" (UID: \"924a7eb5-2c29-49e5-8a1d-60525deff185\") " Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.208705 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzw69\" (UniqueName: \"kubernetes.io/projected/924a7eb5-2c29-49e5-8a1d-60525deff185-kube-api-access-dzw69\") pod \"924a7eb5-2c29-49e5-8a1d-60525deff185\" (UID: \"924a7eb5-2c29-49e5-8a1d-60525deff185\") " Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.209577 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/924a7eb5-2c29-49e5-8a1d-60525deff185-combined-ca-bundle\") pod \"924a7eb5-2c29-49e5-8a1d-60525deff185\" (UID: \"924a7eb5-2c29-49e5-8a1d-60525deff185\") " Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.219649 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/924a7eb5-2c29-49e5-8a1d-60525deff185-scripts" (OuterVolumeSpecName: "scripts") pod "924a7eb5-2c29-49e5-8a1d-60525deff185" (UID: "924a7eb5-2c29-49e5-8a1d-60525deff185"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.219678 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/924a7eb5-2c29-49e5-8a1d-60525deff185-kube-api-access-dzw69" (OuterVolumeSpecName: "kube-api-access-dzw69") pod "924a7eb5-2c29-49e5-8a1d-60525deff185" (UID: "924a7eb5-2c29-49e5-8a1d-60525deff185"). InnerVolumeSpecName "kube-api-access-dzw69". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.240070 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/924a7eb5-2c29-49e5-8a1d-60525deff185-config-data" (OuterVolumeSpecName: "config-data") pod "924a7eb5-2c29-49e5-8a1d-60525deff185" (UID: "924a7eb5-2c29-49e5-8a1d-60525deff185"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.241741 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/924a7eb5-2c29-49e5-8a1d-60525deff185-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "924a7eb5-2c29-49e5-8a1d-60525deff185" (UID: "924a7eb5-2c29-49e5-8a1d-60525deff185"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.311890 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/924a7eb5-2c29-49e5-8a1d-60525deff185-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.311924 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/924a7eb5-2c29-49e5-8a1d-60525deff185-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.311939 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzw69\" (UniqueName: \"kubernetes.io/projected/924a7eb5-2c29-49e5-8a1d-60525deff185-kube-api-access-dzw69\") on node \"crc\" DevicePath \"\"" Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.311952 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/924a7eb5-2c29-49e5-8a1d-60525deff185-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.728075 4631 generic.go:334] "Generic (PLEG): container finished" podID="0848a6da-7687-450b-a5c0-f64a5c0ee32e" containerID="1effa5b1412a3fbf930a9c959f059909d6c4e0d94184100b87c360a6dc8e27f7" exitCode=0 Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.728425 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-92k9c" event={"ID":"0848a6da-7687-450b-a5c0-f64a5c0ee32e","Type":"ContainerDied","Data":"1effa5b1412a3fbf930a9c959f059909d6c4e0d94184100b87c360a6dc8e27f7"} Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.730241 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6c257" event={"ID":"924a7eb5-2c29-49e5-8a1d-60525deff185","Type":"ContainerDied","Data":"f0376d3a289582ac075518cac59bfa3fffa1fa4c01d537d9880c8384977cdc74"} Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.730349 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f0376d3a289582ac075518cac59bfa3fffa1fa4c01d537d9880c8384977cdc74" Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.730510 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6c257"
Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.903912 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.904432 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5" containerName="nova-api-log" containerID="cri-o://b97c1c06fec99bfbbf77a854761ab75c26d4afd5318b0d597b4cebd22e4dc4c3" gracePeriod=30
Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.904576 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5" containerName="nova-api-api" containerID="cri-o://fa7a82b1a0c4defc90f7904ac4ac9f22650422f8989750eca355e753193cf54c" gracePeriod=30
Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.923296 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.923539 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="d42596bc-d4b2-4347-93a2-5b36267cf451" containerName="nova-scheduler-scheduler" containerID="cri-o://8e22b2b88bd870cab6d3e12efd52e59785b2e000eaf56829689c8731d6db1ca8" gracePeriod=30
Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.953257 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.953568 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f203673d-35bc-41d0-8cf1-f9573fe6bad4" containerName="nova-metadata-log" containerID="cri-o://0f6138524d20f0cbb607b4f4280ddb57a25c4bc53ba4d475b81d6e8e0e8b859e" gracePeriod=30
Dec 04 17:53:32 crc kubenswrapper[4631]: I1204 17:53:32.953668 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f203673d-35bc-41d0-8cf1-f9573fe6bad4" containerName="nova-metadata-metadata" containerID="cri-o://f1ba7214cbf227079682f40107e92a05250ab6a87aec6b252e9086426c11b6a0" gracePeriod=30
Dec 04 17:53:32 crc kubenswrapper[4631]: E1204 17:53:32.974945 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8e22b2b88bd870cab6d3e12efd52e59785b2e000eaf56829689c8731d6db1ca8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 04 17:53:32 crc kubenswrapper[4631]: E1204 17:53:32.977638 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8e22b2b88bd870cab6d3e12efd52e59785b2e000eaf56829689c8731d6db1ca8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 04 17:53:32 crc kubenswrapper[4631]: E1204 17:53:32.980849 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8e22b2b88bd870cab6d3e12efd52e59785b2e000eaf56829689c8731d6db1ca8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 04 17:53:32 crc kubenswrapper[4631]: E1204 17:53:32.980918 4631 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="d42596bc-d4b2-4347-93a2-5b36267cf451" containerName="nova-scheduler-scheduler"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.538811 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.639841 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f203673d-35bc-41d0-8cf1-f9573fe6bad4-config-data\") pod \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\" (UID: \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\") "
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.640159 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f203673d-35bc-41d0-8cf1-f9573fe6bad4-combined-ca-bundle\") pod \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\" (UID: \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\") "
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.640882 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f203673d-35bc-41d0-8cf1-f9573fe6bad4-nova-metadata-tls-certs\") pod \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\" (UID: \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\") "
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.640902 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dx99\" (UniqueName: \"kubernetes.io/projected/f203673d-35bc-41d0-8cf1-f9573fe6bad4-kube-api-access-2dx99\") pod \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\" (UID: \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\") "
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.640955 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f203673d-35bc-41d0-8cf1-f9573fe6bad4-logs\") pod \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\" (UID: \"f203673d-35bc-41d0-8cf1-f9573fe6bad4\") "
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.642808 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f203673d-35bc-41d0-8cf1-f9573fe6bad4-logs" (OuterVolumeSpecName: "logs") pod "f203673d-35bc-41d0-8cf1-f9573fe6bad4" (UID: "f203673d-35bc-41d0-8cf1-f9573fe6bad4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.660660 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f203673d-35bc-41d0-8cf1-f9573fe6bad4-kube-api-access-2dx99" (OuterVolumeSpecName: "kube-api-access-2dx99") pod "f203673d-35bc-41d0-8cf1-f9573fe6bad4" (UID: "f203673d-35bc-41d0-8cf1-f9573fe6bad4"). InnerVolumeSpecName "kube-api-access-2dx99". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.680452 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f203673d-35bc-41d0-8cf1-f9573fe6bad4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f203673d-35bc-41d0-8cf1-f9573fe6bad4" (UID: "f203673d-35bc-41d0-8cf1-f9573fe6bad4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.701444 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f203673d-35bc-41d0-8cf1-f9573fe6bad4-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "f203673d-35bc-41d0-8cf1-f9573fe6bad4" (UID: "f203673d-35bc-41d0-8cf1-f9573fe6bad4"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.701526 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f203673d-35bc-41d0-8cf1-f9573fe6bad4-config-data" (OuterVolumeSpecName: "config-data") pod "f203673d-35bc-41d0-8cf1-f9573fe6bad4" (UID: "f203673d-35bc-41d0-8cf1-f9573fe6bad4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.742737 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f203673d-35bc-41d0-8cf1-f9573fe6bad4-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.742765 4631 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f203673d-35bc-41d0-8cf1-f9573fe6bad4-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.742776 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dx99\" (UniqueName: \"kubernetes.io/projected/f203673d-35bc-41d0-8cf1-f9573fe6bad4-kube-api-access-2dx99\") on node \"crc\" DevicePath \"\""
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.742785 4631 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f203673d-35bc-41d0-8cf1-f9573fe6bad4-logs\") on node \"crc\" DevicePath \"\""
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.742796 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f203673d-35bc-41d0-8cf1-f9573fe6bad4-config-data\") on node \"crc\" DevicePath \"\""
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.748995 4631 generic.go:334] "Generic (PLEG): container finished" podID="f203673d-35bc-41d0-8cf1-f9573fe6bad4" containerID="f1ba7214cbf227079682f40107e92a05250ab6a87aec6b252e9086426c11b6a0" exitCode=0
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.749022 4631 generic.go:334] "Generic (PLEG): container finished" podID="f203673d-35bc-41d0-8cf1-f9573fe6bad4" containerID="0f6138524d20f0cbb607b4f4280ddb57a25c4bc53ba4d475b81d6e8e0e8b859e" exitCode=143
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.749035 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f203673d-35bc-41d0-8cf1-f9573fe6bad4","Type":"ContainerDied","Data":"f1ba7214cbf227079682f40107e92a05250ab6a87aec6b252e9086426c11b6a0"}
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.749076 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.749097 4631 scope.go:117] "RemoveContainer" containerID="f1ba7214cbf227079682f40107e92a05250ab6a87aec6b252e9086426c11b6a0"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.749081 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f203673d-35bc-41d0-8cf1-f9573fe6bad4","Type":"ContainerDied","Data":"0f6138524d20f0cbb607b4f4280ddb57a25c4bc53ba4d475b81d6e8e0e8b859e"}
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.749239 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f203673d-35bc-41d0-8cf1-f9573fe6bad4","Type":"ContainerDied","Data":"1a246cc7e688028eba601aa73d88e3d2b953d932f9d7b59ea0fd35de5f9dcce7"}
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.758039 4631 generic.go:334] "Generic (PLEG): container finished" podID="6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5" containerID="b97c1c06fec99bfbbf77a854761ab75c26d4afd5318b0d597b4cebd22e4dc4c3" exitCode=143
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.758189 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5","Type":"ContainerDied","Data":"b97c1c06fec99bfbbf77a854761ab75c26d4afd5318b0d597b4cebd22e4dc4c3"}
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.782036 4631 scope.go:117] "RemoveContainer" containerID="0f6138524d20f0cbb607b4f4280ddb57a25c4bc53ba4d475b81d6e8e0e8b859e"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.787541 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.798720 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.819551 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 04 17:53:33 crc kubenswrapper[4631]: E1204 17:53:33.819981 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f203673d-35bc-41d0-8cf1-f9573fe6bad4" containerName="nova-metadata-log"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.819994 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="f203673d-35bc-41d0-8cf1-f9573fe6bad4" containerName="nova-metadata-log"
Dec 04 17:53:33 crc kubenswrapper[4631]: E1204 17:53:33.820018 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffcbb49b-28de-482a-a1c7-6d055f0cbf52" containerName="dnsmasq-dns"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.820024 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffcbb49b-28de-482a-a1c7-6d055f0cbf52" containerName="dnsmasq-dns"
Dec 04 17:53:33 crc kubenswrapper[4631]: E1204 17:53:33.820047 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffcbb49b-28de-482a-a1c7-6d055f0cbf52" containerName="init"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.820053 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffcbb49b-28de-482a-a1c7-6d055f0cbf52" containerName="init"
Dec 04 17:53:33 crc kubenswrapper[4631]: E1204 17:53:33.820061 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="924a7eb5-2c29-49e5-8a1d-60525deff185" containerName="nova-manage"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.820067 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="924a7eb5-2c29-49e5-8a1d-60525deff185" containerName="nova-manage"
Dec 04 17:53:33 crc kubenswrapper[4631]: E1204 17:53:33.820078 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f203673d-35bc-41d0-8cf1-f9573fe6bad4" containerName="nova-metadata-metadata"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.820083 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="f203673d-35bc-41d0-8cf1-f9573fe6bad4" containerName="nova-metadata-metadata"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.820243 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffcbb49b-28de-482a-a1c7-6d055f0cbf52" containerName="dnsmasq-dns"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.820256 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="924a7eb5-2c29-49e5-8a1d-60525deff185" containerName="nova-manage"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.820279 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="f203673d-35bc-41d0-8cf1-f9573fe6bad4" containerName="nova-metadata-metadata"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.820289 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="f203673d-35bc-41d0-8cf1-f9573fe6bad4" containerName="nova-metadata-log"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.821200 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.824667 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.824846 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.834330 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.851557 4631 scope.go:117] "RemoveContainer" containerID="f1ba7214cbf227079682f40107e92a05250ab6a87aec6b252e9086426c11b6a0"
Dec 04 17:53:33 crc kubenswrapper[4631]: E1204 17:53:33.856445 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1ba7214cbf227079682f40107e92a05250ab6a87aec6b252e9086426c11b6a0\": container with ID starting with f1ba7214cbf227079682f40107e92a05250ab6a87aec6b252e9086426c11b6a0 not found: ID does not exist" containerID="f1ba7214cbf227079682f40107e92a05250ab6a87aec6b252e9086426c11b6a0"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.856521 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1ba7214cbf227079682f40107e92a05250ab6a87aec6b252e9086426c11b6a0"} err="failed to get container status \"f1ba7214cbf227079682f40107e92a05250ab6a87aec6b252e9086426c11b6a0\": rpc error: code = NotFound desc = could not find container \"f1ba7214cbf227079682f40107e92a05250ab6a87aec6b252e9086426c11b6a0\": container with ID starting with f1ba7214cbf227079682f40107e92a05250ab6a87aec6b252e9086426c11b6a0 not found: ID does not exist"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.856709 4631 scope.go:117] "RemoveContainer" containerID="0f6138524d20f0cbb607b4f4280ddb57a25c4bc53ba4d475b81d6e8e0e8b859e"
Dec 04 17:53:33 crc kubenswrapper[4631]: E1204 17:53:33.859818 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f6138524d20f0cbb607b4f4280ddb57a25c4bc53ba4d475b81d6e8e0e8b859e\": container with ID starting with 0f6138524d20f0cbb607b4f4280ddb57a25c4bc53ba4d475b81d6e8e0e8b859e not found: ID does not exist" containerID="0f6138524d20f0cbb607b4f4280ddb57a25c4bc53ba4d475b81d6e8e0e8b859e"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.859860 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f6138524d20f0cbb607b4f4280ddb57a25c4bc53ba4d475b81d6e8e0e8b859e"} err="failed to get container status \"0f6138524d20f0cbb607b4f4280ddb57a25c4bc53ba4d475b81d6e8e0e8b859e\": rpc error: code = NotFound desc = could not find container \"0f6138524d20f0cbb607b4f4280ddb57a25c4bc53ba4d475b81d6e8e0e8b859e\": container with ID starting with 0f6138524d20f0cbb607b4f4280ddb57a25c4bc53ba4d475b81d6e8e0e8b859e not found: ID does not exist"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.859889 4631 scope.go:117] "RemoveContainer" containerID="f1ba7214cbf227079682f40107e92a05250ab6a87aec6b252e9086426c11b6a0"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.860236 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1ba7214cbf227079682f40107e92a05250ab6a87aec6b252e9086426c11b6a0"} err="failed to get container status \"f1ba7214cbf227079682f40107e92a05250ab6a87aec6b252e9086426c11b6a0\": rpc error: code = NotFound desc = could not find container \"f1ba7214cbf227079682f40107e92a05250ab6a87aec6b252e9086426c11b6a0\": container with ID starting with f1ba7214cbf227079682f40107e92a05250ab6a87aec6b252e9086426c11b6a0 not found: ID does not exist"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.860256 4631 scope.go:117] "RemoveContainer" containerID="0f6138524d20f0cbb607b4f4280ddb57a25c4bc53ba4d475b81d6e8e0e8b859e"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.860617 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f6138524d20f0cbb607b4f4280ddb57a25c4bc53ba4d475b81d6e8e0e8b859e"} err="failed to get container status \"0f6138524d20f0cbb607b4f4280ddb57a25c4bc53ba4d475b81d6e8e0e8b859e\": rpc error: code = NotFound desc = could not find container \"0f6138524d20f0cbb607b4f4280ddb57a25c4bc53ba4d475b81d6e8e0e8b859e\": container with ID starting with 0f6138524d20f0cbb607b4f4280ddb57a25c4bc53ba4d475b81d6e8e0e8b859e not found: ID does not exist"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.948092 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/abf757d5-6165-4cb3-9e40-b0eec2920b02-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"abf757d5-6165-4cb3-9e40-b0eec2920b02\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.948236 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abf757d5-6165-4cb3-9e40-b0eec2920b02-config-data\") pod \"nova-metadata-0\" (UID: \"abf757d5-6165-4cb3-9e40-b0eec2920b02\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.948286 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mf5tx\" (UniqueName: \"kubernetes.io/projected/abf757d5-6165-4cb3-9e40-b0eec2920b02-kube-api-access-mf5tx\") pod \"nova-metadata-0\" (UID: \"abf757d5-6165-4cb3-9e40-b0eec2920b02\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.948398 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/abf757d5-6165-4cb3-9e40-b0eec2920b02-logs\") pod \"nova-metadata-0\" (UID: \"abf757d5-6165-4cb3-9e40-b0eec2920b02\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:33 crc kubenswrapper[4631]: I1204 17:53:33.948451 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abf757d5-6165-4cb3-9e40-b0eec2920b02-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"abf757d5-6165-4cb3-9e40-b0eec2920b02\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.053441 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/abf757d5-6165-4cb3-9e40-b0eec2920b02-logs\") pod \"nova-metadata-0\" (UID: \"abf757d5-6165-4cb3-9e40-b0eec2920b02\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.053523 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abf757d5-6165-4cb3-9e40-b0eec2920b02-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"abf757d5-6165-4cb3-9e40-b0eec2920b02\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.053586 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/abf757d5-6165-4cb3-9e40-b0eec2920b02-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"abf757d5-6165-4cb3-9e40-b0eec2920b02\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.053736 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abf757d5-6165-4cb3-9e40-b0eec2920b02-config-data\") pod \"nova-metadata-0\" (UID: \"abf757d5-6165-4cb3-9e40-b0eec2920b02\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.053786 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mf5tx\" (UniqueName: \"kubernetes.io/projected/abf757d5-6165-4cb3-9e40-b0eec2920b02-kube-api-access-mf5tx\") pod \"nova-metadata-0\" (UID: \"abf757d5-6165-4cb3-9e40-b0eec2920b02\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.054739 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/abf757d5-6165-4cb3-9e40-b0eec2920b02-logs\") pod \"nova-metadata-0\" (UID: \"abf757d5-6165-4cb3-9e40-b0eec2920b02\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.063045 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abf757d5-6165-4cb3-9e40-b0eec2920b02-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"abf757d5-6165-4cb3-9e40-b0eec2920b02\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.070510 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abf757d5-6165-4cb3-9e40-b0eec2920b02-config-data\") pod \"nova-metadata-0\" (UID: \"abf757d5-6165-4cb3-9e40-b0eec2920b02\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.073959 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/abf757d5-6165-4cb3-9e40-b0eec2920b02-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"abf757d5-6165-4cb3-9e40-b0eec2920b02\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.097004 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mf5tx\" (UniqueName: \"kubernetes.io/projected/abf757d5-6165-4cb3-9e40-b0eec2920b02-kube-api-access-mf5tx\") pod \"nova-metadata-0\" (UID: \"abf757d5-6165-4cb3-9e40-b0eec2920b02\") " pod="openstack/nova-metadata-0"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.164188 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.254466 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f203673d-35bc-41d0-8cf1-f9573fe6bad4" path="/var/lib/kubelet/pods/f203673d-35bc-41d0-8cf1-f9573fe6bad4/volumes"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.268038 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-92k9c"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.362963 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0848a6da-7687-450b-a5c0-f64a5c0ee32e-config-data\") pod \"0848a6da-7687-450b-a5c0-f64a5c0ee32e\" (UID: \"0848a6da-7687-450b-a5c0-f64a5c0ee32e\") "
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.363070 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0848a6da-7687-450b-a5c0-f64a5c0ee32e-combined-ca-bundle\") pod \"0848a6da-7687-450b-a5c0-f64a5c0ee32e\" (UID: \"0848a6da-7687-450b-a5c0-f64a5c0ee32e\") "
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.363134 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bpxqg\" (UniqueName: \"kubernetes.io/projected/0848a6da-7687-450b-a5c0-f64a5c0ee32e-kube-api-access-bpxqg\") pod \"0848a6da-7687-450b-a5c0-f64a5c0ee32e\" (UID: \"0848a6da-7687-450b-a5c0-f64a5c0ee32e\") "
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.363188 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0848a6da-7687-450b-a5c0-f64a5c0ee32e-scripts\") pod \"0848a6da-7687-450b-a5c0-f64a5c0ee32e\" (UID: \"0848a6da-7687-450b-a5c0-f64a5c0ee32e\") "
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.368474 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0848a6da-7687-450b-a5c0-f64a5c0ee32e-kube-api-access-bpxqg" (OuterVolumeSpecName: "kube-api-access-bpxqg") pod "0848a6da-7687-450b-a5c0-f64a5c0ee32e" (UID: "0848a6da-7687-450b-a5c0-f64a5c0ee32e"). InnerVolumeSpecName "kube-api-access-bpxqg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.377665 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0848a6da-7687-450b-a5c0-f64a5c0ee32e-scripts" (OuterVolumeSpecName: "scripts") pod "0848a6da-7687-450b-a5c0-f64a5c0ee32e" (UID: "0848a6da-7687-450b-a5c0-f64a5c0ee32e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.402555 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0848a6da-7687-450b-a5c0-f64a5c0ee32e-config-data" (OuterVolumeSpecName: "config-data") pod "0848a6da-7687-450b-a5c0-f64a5c0ee32e" (UID: "0848a6da-7687-450b-a5c0-f64a5c0ee32e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.420572 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0848a6da-7687-450b-a5c0-f64a5c0ee32e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0848a6da-7687-450b-a5c0-f64a5c0ee32e" (UID: "0848a6da-7687-450b-a5c0-f64a5c0ee32e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.470629 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0848a6da-7687-450b-a5c0-f64a5c0ee32e-config-data\") on node \"crc\" DevicePath \"\""
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.470655 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0848a6da-7687-450b-a5c0-f64a5c0ee32e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.470668 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bpxqg\" (UniqueName: \"kubernetes.io/projected/0848a6da-7687-450b-a5c0-f64a5c0ee32e-kube-api-access-bpxqg\") on node \"crc\" DevicePath \"\""
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.470678 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0848a6da-7687-450b-a5c0-f64a5c0ee32e-scripts\") on node \"crc\" DevicePath \"\""
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.779289 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-92k9c" event={"ID":"0848a6da-7687-450b-a5c0-f64a5c0ee32e","Type":"ContainerDied","Data":"a6a319c92c0f49bf5b30b3c573e09fe9636e45b06eb24af03a08e50558039ac4"}
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.779326 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6a319c92c0f49bf5b30b3c573e09fe9636e45b06eb24af03a08e50558039ac4"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.779387 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-92k9c"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.804156 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 04 17:53:34 crc kubenswrapper[4631]: W1204 17:53:34.806726 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabf757d5_6165_4cb3_9e40_b0eec2920b02.slice/crio-32de8b6d3d4701ce651615620fb001aff9f511d9b0c742e6b2db82286f104fc8 WatchSource:0}: Error finding container 32de8b6d3d4701ce651615620fb001aff9f511d9b0c742e6b2db82286f104fc8: Status 404 returned error can't find the container with id 32de8b6d3d4701ce651615620fb001aff9f511d9b0c742e6b2db82286f104fc8
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.850963 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 04 17:53:34 crc kubenswrapper[4631]: E1204 17:53:34.851733 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0848a6da-7687-450b-a5c0-f64a5c0ee32e" containerName="nova-cell1-conductor-db-sync"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.851847 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0848a6da-7687-450b-a5c0-f64a5c0ee32e" containerName="nova-cell1-conductor-db-sync"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.852179 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="0848a6da-7687-450b-a5c0-f64a5c0ee32e" containerName="nova-cell1-conductor-db-sync"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.853054 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.856016 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.871562 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.989580 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f18cd83-a5c6-455c-87de-2549f96b9073-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"8f18cd83-a5c6-455c-87de-2549f96b9073\") " pod="openstack/nova-cell1-conductor-0"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.989690 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f18cd83-a5c6-455c-87de-2549f96b9073-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"8f18cd83-a5c6-455c-87de-2549f96b9073\") " pod="openstack/nova-cell1-conductor-0"
Dec 04 17:53:34 crc kubenswrapper[4631]: I1204 17:53:34.989853 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmxd9\" (UniqueName: \"kubernetes.io/projected/8f18cd83-a5c6-455c-87de-2549f96b9073-kube-api-access-jmxd9\") pod \"nova-cell1-conductor-0\" (UID: \"8f18cd83-a5c6-455c-87de-2549f96b9073\") " pod="openstack/nova-cell1-conductor-0"
Dec 04 17:53:35 crc kubenswrapper[4631]: I1204 17:53:35.091943 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmxd9\" (UniqueName: \"kubernetes.io/projected/8f18cd83-a5c6-455c-87de-2549f96b9073-kube-api-access-jmxd9\") pod \"nova-cell1-conductor-0\" (UID: \"8f18cd83-a5c6-455c-87de-2549f96b9073\") " pod="openstack/nova-cell1-conductor-0"
Dec 04 17:53:35 crc kubenswrapper[4631]: I1204 17:53:35.092345 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f18cd83-a5c6-455c-87de-2549f96b9073-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"8f18cd83-a5c6-455c-87de-2549f96b9073\") " pod="openstack/nova-cell1-conductor-0"
Dec 04 17:53:35 crc kubenswrapper[4631]: I1204 17:53:35.092461 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f18cd83-a5c6-455c-87de-2549f96b9073-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"8f18cd83-a5c6-455c-87de-2549f96b9073\") " pod="openstack/nova-cell1-conductor-0"
Dec 04 17:53:35 crc kubenswrapper[4631]: I1204 17:53:35.097546 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f18cd83-a5c6-455c-87de-2549f96b9073-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"8f18cd83-a5c6-455c-87de-2549f96b9073\") " pod="openstack/nova-cell1-conductor-0"
Dec 04 17:53:35 crc kubenswrapper[4631]: I1204 17:53:35.103962 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f18cd83-a5c6-455c-87de-2549f96b9073-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"8f18cd83-a5c6-455c-87de-2549f96b9073\") " pod="openstack/nova-cell1-conductor-0"
Dec 04 17:53:35 crc kubenswrapper[4631]: I1204 17:53:35.108544 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmxd9\" (UniqueName: \"kubernetes.io/projected/8f18cd83-a5c6-455c-87de-2549f96b9073-kube-api-access-jmxd9\") pod \"nova-cell1-conductor-0\" (UID: \"8f18cd83-a5c6-455c-87de-2549f96b9073\") " pod="openstack/nova-cell1-conductor-0"
Dec 04 17:53:35 crc kubenswrapper[4631]: I1204 17:53:35.171939 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Dec 04 17:53:35 crc kubenswrapper[4631]: I1204 17:53:35.627291 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 04 17:53:35 crc kubenswrapper[4631]: I1204 17:53:35.793392 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"abf757d5-6165-4cb3-9e40-b0eec2920b02","Type":"ContainerStarted","Data":"440108ea496c9ab660f6b98ceab053949b035d7b63ea3e20aaa516e2e5ae3c16"}
Dec 04 17:53:35 crc kubenswrapper[4631]: I1204 17:53:35.794706 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"abf757d5-6165-4cb3-9e40-b0eec2920b02","Type":"ContainerStarted","Data":"86b19dcb6971fa3c48d1e270ff2cb133a09b5de1d1c819038d02434ff32618f2"}
Dec 04 17:53:35 crc kubenswrapper[4631]: I1204 17:53:35.794754 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"abf757d5-6165-4cb3-9e40-b0eec2920b02","Type":"ContainerStarted","Data":"32de8b6d3d4701ce651615620fb001aff9f511d9b0c742e6b2db82286f104fc8"}
Dec 04 17:53:35 crc kubenswrapper[4631]: I1204 17:53:35.796103 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"8f18cd83-a5c6-455c-87de-2549f96b9073","Type":"ContainerStarted","Data":"50caf36f70fd32d84c9094694d17e0e7fad3ab8169e651382462935840393a8e"}
Dec 04 17:53:35 crc kubenswrapper[4631]: I1204 17:53:35.796143 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"8f18cd83-a5c6-455c-87de-2549f96b9073","Type":"ContainerStarted","Data":"ac0ff403ef9a512dbe5be4fb2f0c4133e430143a606f0567a6c00197f37686c5"}
Dec 04 17:53:35 crc kubenswrapper[4631]: I1204 17:53:35.796615 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0"
Dec 04 17:53:35 crc kubenswrapper[4631]: I1204 17:53:35.845685 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.845658682 podStartE2EDuration="2.845658682s" podCreationTimestamp="2025-12-04 17:53:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:53:35.826992006 +0000 UTC m=+1545.859234004" watchObservedRunningTime="2025-12-04 17:53:35.845658682 +0000 UTC m=+1545.877900700"
Dec 04 17:53:35 crc kubenswrapper[4631]: I1204 17:53:35.858842 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=1.85882239 podStartE2EDuration="1.85882239s" podCreationTimestamp="2025-12-04 17:53:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:53:35.84408142 +0000 UTC m=+1545.876323418" watchObservedRunningTime="2025-12-04 17:53:35.85882239 +0000 UTC m=+1545.891064388"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.022241 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.022302 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.022357 4631 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q27wh"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.023287 4631 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009"} pod="openshift-machine-config-operator/machine-config-daemon-q27wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.023345 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" containerID="cri-o://05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" gracePeriod=600
Dec 04 17:53:36 crc kubenswrapper[4631]: E1204 17:53:36.144687 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.641869 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.732061 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d42596bc-d4b2-4347-93a2-5b36267cf451-config-data\") pod \"d42596bc-d4b2-4347-93a2-5b36267cf451\" (UID: \"d42596bc-d4b2-4347-93a2-5b36267cf451\") "
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.732508 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d42596bc-d4b2-4347-93a2-5b36267cf451-combined-ca-bundle\") pod \"d42596bc-d4b2-4347-93a2-5b36267cf451\" (UID: \"d42596bc-d4b2-4347-93a2-5b36267cf451\") "
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.732735 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5jb9\" (UniqueName: \"kubernetes.io/projected/d42596bc-d4b2-4347-93a2-5b36267cf451-kube-api-access-b5jb9\") pod \"d42596bc-d4b2-4347-93a2-5b36267cf451\" (UID: \"d42596bc-d4b2-4347-93a2-5b36267cf451\") "
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.739326 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d42596bc-d4b2-4347-93a2-5b36267cf451-kube-api-access-b5jb9" (OuterVolumeSpecName: "kube-api-access-b5jb9") pod "d42596bc-d4b2-4347-93a2-5b36267cf451" (UID: "d42596bc-d4b2-4347-93a2-5b36267cf451"). InnerVolumeSpecName "kube-api-access-b5jb9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.768199 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d42596bc-d4b2-4347-93a2-5b36267cf451-config-data" (OuterVolumeSpecName: "config-data") pod "d42596bc-d4b2-4347-93a2-5b36267cf451" (UID: "d42596bc-d4b2-4347-93a2-5b36267cf451"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.769084 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.770001 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d42596bc-d4b2-4347-93a2-5b36267cf451-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d42596bc-d4b2-4347-93a2-5b36267cf451" (UID: "d42596bc-d4b2-4347-93a2-5b36267cf451"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.818136 4631 generic.go:334] "Generic (PLEG): container finished" podID="6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5" containerID="fa7a82b1a0c4defc90f7904ac4ac9f22650422f8989750eca355e753193cf54c" exitCode=0
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.818197 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5","Type":"ContainerDied","Data":"fa7a82b1a0c4defc90f7904ac4ac9f22650422f8989750eca355e753193cf54c"}
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.818244 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.818266 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5","Type":"ContainerDied","Data":"ccc6e5b9f197be5065258b1570b2b1654780afe2c1d854937acbe6ef6d55653c"}
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.818295 4631 scope.go:117] "RemoveContainer" containerID="fa7a82b1a0c4defc90f7904ac4ac9f22650422f8989750eca355e753193cf54c"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.826963 4631 generic.go:334] "Generic (PLEG): container finished" podID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" exitCode=0
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.827028 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerDied","Data":"05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009"}
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.827555 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009"
Dec 04 17:53:36 crc kubenswrapper[4631]: E1204 17:53:36.827901 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.832168 4631 generic.go:334] "Generic (PLEG): container finished" podID="d42596bc-d4b2-4347-93a2-5b36267cf451" containerID="8e22b2b88bd870cab6d3e12efd52e59785b2e000eaf56829689c8731d6db1ca8" exitCode=0
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.832296 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d42596bc-d4b2-4347-93a2-5b36267cf451","Type":"ContainerDied","Data":"8e22b2b88bd870cab6d3e12efd52e59785b2e000eaf56829689c8731d6db1ca8"}
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.832348 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d42596bc-d4b2-4347-93a2-5b36267cf451","Type":"ContainerDied","Data":"033a40c2d10cfaf4511d49675e8e6e453ad37f8bbcc3166429007bb78a16c5c5"}
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.832416 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.834662 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-logs\") pod \"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5\" (UID: \"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5\") "
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.834782 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxp2n\" (UniqueName: \"kubernetes.io/projected/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-kube-api-access-zxp2n\") pod \"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5\" (UID: \"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5\") "
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.834856 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-combined-ca-bundle\") pod \"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5\" (UID: \"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5\") "
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.834925 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-config-data\") pod \"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5\" (UID: \"6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5\") "
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.835625 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d42596bc-d4b2-4347-93a2-5b36267cf451-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.835643 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5jb9\" (UniqueName: \"kubernetes.io/projected/d42596bc-d4b2-4347-93a2-5b36267cf451-kube-api-access-b5jb9\") on node \"crc\" DevicePath \"\""
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.835658 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d42596bc-d4b2-4347-93a2-5b36267cf451-config-data\") on node \"crc\" DevicePath \"\""
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.839328 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-logs" (OuterVolumeSpecName: "logs") pod "6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5" (UID: "6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.850339 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-kube-api-access-zxp2n" (OuterVolumeSpecName: "kube-api-access-zxp2n") pod "6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5" (UID: "6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5"). InnerVolumeSpecName "kube-api-access-zxp2n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.859313 4631 scope.go:117] "RemoveContainer" containerID="b97c1c06fec99bfbbf77a854761ab75c26d4afd5318b0d597b4cebd22e4dc4c3"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.889520 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-config-data" (OuterVolumeSpecName: "config-data") pod "6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5" (UID: "6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.899449 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.906975 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5" (UID: "6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.909616 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.918849 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Dec 04 17:53:36 crc kubenswrapper[4631]: E1204 17:53:36.919227 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5" containerName="nova-api-api"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.919246 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5" containerName="nova-api-api"
Dec 04 17:53:36 crc kubenswrapper[4631]: E1204 17:53:36.919275 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5" containerName="nova-api-log"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.919283 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5" containerName="nova-api-log"
Dec 04 17:53:36 crc kubenswrapper[4631]: E1204 17:53:36.919307 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d42596bc-d4b2-4347-93a2-5b36267cf451" containerName="nova-scheduler-scheduler"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.919317 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="d42596bc-d4b2-4347-93a2-5b36267cf451" containerName="nova-scheduler-scheduler"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.919563 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5" containerName="nova-api-log"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.919585 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="d42596bc-d4b2-4347-93a2-5b36267cf451" containerName="nova-scheduler-scheduler"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.919603 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5" containerName="nova-api-api"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.919605 4631 scope.go:117] "RemoveContainer" containerID="fa7a82b1a0c4defc90f7904ac4ac9f22650422f8989750eca355e753193cf54c"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.920129 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 04 17:53:36 crc kubenswrapper[4631]: E1204 17:53:36.920642 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa7a82b1a0c4defc90f7904ac4ac9f22650422f8989750eca355e753193cf54c\": container with ID starting with fa7a82b1a0c4defc90f7904ac4ac9f22650422f8989750eca355e753193cf54c not found: ID does not exist" containerID="fa7a82b1a0c4defc90f7904ac4ac9f22650422f8989750eca355e753193cf54c"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.920667 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa7a82b1a0c4defc90f7904ac4ac9f22650422f8989750eca355e753193cf54c"} err="failed to get container status \"fa7a82b1a0c4defc90f7904ac4ac9f22650422f8989750eca355e753193cf54c\": rpc error: code = NotFound desc = could not find container \"fa7a82b1a0c4defc90f7904ac4ac9f22650422f8989750eca355e753193cf54c\": container with ID starting with fa7a82b1a0c4defc90f7904ac4ac9f22650422f8989750eca355e753193cf54c not found: ID does not exist"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.920686 4631 scope.go:117] "RemoveContainer" containerID="b97c1c06fec99bfbbf77a854761ab75c26d4afd5318b0d597b4cebd22e4dc4c3"
Dec 04 17:53:36 crc kubenswrapper[4631]: E1204 17:53:36.922405 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b97c1c06fec99bfbbf77a854761ab75c26d4afd5318b0d597b4cebd22e4dc4c3\": container with ID starting with b97c1c06fec99bfbbf77a854761ab75c26d4afd5318b0d597b4cebd22e4dc4c3 not found: ID does not exist" containerID="b97c1c06fec99bfbbf77a854761ab75c26d4afd5318b0d597b4cebd22e4dc4c3"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.922432 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b97c1c06fec99bfbbf77a854761ab75c26d4afd5318b0d597b4cebd22e4dc4c3"} err="failed to get container status \"b97c1c06fec99bfbbf77a854761ab75c26d4afd5318b0d597b4cebd22e4dc4c3\": rpc error: code = NotFound desc = could not find container \"b97c1c06fec99bfbbf77a854761ab75c26d4afd5318b0d597b4cebd22e4dc4c3\": container with ID starting with b97c1c06fec99bfbbf77a854761ab75c26d4afd5318b0d597b4cebd22e4dc4c3 not found: ID does not exist"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.922447 4631 scope.go:117] "RemoveContainer" containerID="748b1412c888c95d08adbf71c3c971aef4060a8000682c7031f16f9f8ee657ac"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.922660 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.937724 4631 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-logs\") on node \"crc\" DevicePath \"\""
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.937747 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxp2n\" (UniqueName: \"kubernetes.io/projected/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-kube-api-access-zxp2n\") on node \"crc\" DevicePath \"\""
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.937758 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.937766 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5-config-data\") on node \"crc\" DevicePath \"\""
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.950634 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.964327 4631 scope.go:117] "RemoveContainer" containerID="8e22b2b88bd870cab6d3e12efd52e59785b2e000eaf56829689c8731d6db1ca8"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.985320 4631 scope.go:117] "RemoveContainer" containerID="8e22b2b88bd870cab6d3e12efd52e59785b2e000eaf56829689c8731d6db1ca8"
Dec 04 17:53:36 crc kubenswrapper[4631]: E1204 17:53:36.985734 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e22b2b88bd870cab6d3e12efd52e59785b2e000eaf56829689c8731d6db1ca8\": container with ID starting with 8e22b2b88bd870cab6d3e12efd52e59785b2e000eaf56829689c8731d6db1ca8 not found: ID does not exist" containerID="8e22b2b88bd870cab6d3e12efd52e59785b2e000eaf56829689c8731d6db1ca8"
Dec 04 17:53:36 crc kubenswrapper[4631]: I1204 17:53:36.985763 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e22b2b88bd870cab6d3e12efd52e59785b2e000eaf56829689c8731d6db1ca8"} err="failed to get container status \"8e22b2b88bd870cab6d3e12efd52e59785b2e000eaf56829689c8731d6db1ca8\": rpc error: code = NotFound desc = could not find container \"8e22b2b88bd870cab6d3e12efd52e59785b2e000eaf56829689c8731d6db1ca8\": container with ID starting with 8e22b2b88bd870cab6d3e12efd52e59785b2e000eaf56829689c8731d6db1ca8 not found: ID does not exist"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.038980 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e\") " pod="openstack/nova-scheduler-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.040525 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjjtg\" (UniqueName: \"kubernetes.io/projected/aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e-kube-api-access-rjjtg\") pod \"nova-scheduler-0\" (UID: \"aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e\") " pod="openstack/nova-scheduler-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.040819 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e-config-data\") pod \"nova-scheduler-0\" (UID: \"aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e\") " pod="openstack/nova-scheduler-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.142610 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e-config-data\") pod \"nova-scheduler-0\" (UID: \"aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e\") " pod="openstack/nova-scheduler-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.142915 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e\") " pod="openstack/nova-scheduler-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.142958 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjjtg\" (UniqueName: \"kubernetes.io/projected/aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e-kube-api-access-rjjtg\") pod \"nova-scheduler-0\" (UID: \"aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e\") " pod="openstack/nova-scheduler-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.149770 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e-config-data\") pod \"nova-scheduler-0\" (UID: \"aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e\") " pod="openstack/nova-scheduler-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.159777 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.165403 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e\") " pod="openstack/nova-scheduler-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.174785 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.197564 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.198961 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjjtg\" (UniqueName: \"kubernetes.io/projected/aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e-kube-api-access-rjjtg\") pod \"nova-scheduler-0\" (UID: \"aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e\") " pod="openstack/nova-scheduler-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.199092 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.206283 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.216535 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.253099 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.347641 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db0e53ae-f2fd-47d8-a073-694e7894bcb4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"db0e53ae-f2fd-47d8-a073-694e7894bcb4\") " pod="openstack/nova-api-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.347803 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db0e53ae-f2fd-47d8-a073-694e7894bcb4-logs\") pod \"nova-api-0\" (UID: \"db0e53ae-f2fd-47d8-a073-694e7894bcb4\") " pod="openstack/nova-api-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.347854 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db0e53ae-f2fd-47d8-a073-694e7894bcb4-config-data\") pod \"nova-api-0\" (UID: \"db0e53ae-f2fd-47d8-a073-694e7894bcb4\") " pod="openstack/nova-api-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.347890 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gk8sz\" (UniqueName: \"kubernetes.io/projected/db0e53ae-f2fd-47d8-a073-694e7894bcb4-kube-api-access-gk8sz\") pod \"nova-api-0\" (UID: \"db0e53ae-f2fd-47d8-a073-694e7894bcb4\") " pod="openstack/nova-api-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.450450 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db0e53ae-f2fd-47d8-a073-694e7894bcb4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"db0e53ae-f2fd-47d8-a073-694e7894bcb4\") " pod="openstack/nova-api-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.450560 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db0e53ae-f2fd-47d8-a073-694e7894bcb4-logs\") pod \"nova-api-0\" (UID: \"db0e53ae-f2fd-47d8-a073-694e7894bcb4\") " pod="openstack/nova-api-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.450604 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db0e53ae-f2fd-47d8-a073-694e7894bcb4-config-data\") pod \"nova-api-0\" (UID: \"db0e53ae-f2fd-47d8-a073-694e7894bcb4\") " pod="openstack/nova-api-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.450627 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gk8sz\" (UniqueName: \"kubernetes.io/projected/db0e53ae-f2fd-47d8-a073-694e7894bcb4-kube-api-access-gk8sz\") pod \"nova-api-0\" (UID: \"db0e53ae-f2fd-47d8-a073-694e7894bcb4\") " pod="openstack/nova-api-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.452491 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db0e53ae-f2fd-47d8-a073-694e7894bcb4-logs\") pod \"nova-api-0\" (UID: \"db0e53ae-f2fd-47d8-a073-694e7894bcb4\") " pod="openstack/nova-api-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.468519 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db0e53ae-f2fd-47d8-a073-694e7894bcb4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"db0e53ae-f2fd-47d8-a073-694e7894bcb4\") " pod="openstack/nova-api-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.469469 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db0e53ae-f2fd-47d8-a073-694e7894bcb4-config-data\") pod \"nova-api-0\" (UID: \"db0e53ae-f2fd-47d8-a073-694e7894bcb4\") " pod="openstack/nova-api-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.484872 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gk8sz\" (UniqueName: \"kubernetes.io/projected/db0e53ae-f2fd-47d8-a073-694e7894bcb4-kube-api-access-gk8sz\") pod \"nova-api-0\" (UID: \"db0e53ae-f2fd-47d8-a073-694e7894bcb4\") " pod="openstack/nova-api-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.561349 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.768980 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 04 17:53:37 crc kubenswrapper[4631]: I1204 17:53:37.852149 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e","Type":"ContainerStarted","Data":"274527604e7924f89593fe3aa8a1ea7dd9f9ba733a639a73463aae60b22983a4"}
Dec 04 17:53:38 crc kubenswrapper[4631]: I1204 17:53:38.018091 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 04 17:53:38 crc kubenswrapper[4631]: I1204 17:53:38.295557 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5" path="/var/lib/kubelet/pods/6f8bfe37-399a-484a-8ffe-ee3ad6c28bb5/volumes"
Dec 04 17:53:38 crc kubenswrapper[4631]: I1204 17:53:38.296392 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d42596bc-d4b2-4347-93a2-5b36267cf451" path="/var/lib/kubelet/pods/d42596bc-d4b2-4347-93a2-5b36267cf451/volumes"
Dec 04 17:53:38 crc kubenswrapper[4631]: I1204 17:53:38.862217 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e","Type":"ContainerStarted","Data":"ed3a4215006d32029c189e7d0d23590233f9c8f12e197ab155e8019abbf3d828"}
Dec 04 17:53:38 crc kubenswrapper[4631]: I1204 17:53:38.865323 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"db0e53ae-f2fd-47d8-a073-694e7894bcb4","Type":"ContainerStarted","Data":"4b3821d02895a992cd451dac29931706c05ee8d771772452cfbaeada7e433833"}
Dec 04 17:53:38 crc kubenswrapper[4631]: I1204 17:53:38.865358 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"db0e53ae-f2fd-47d8-a073-694e7894bcb4","Type":"ContainerStarted","Data":"f29aaaa98fd50c41397ce6ef7ed3a792e21c1ded1594cc217bd6f1dabc4cf0db"}
Dec 04 17:53:38 crc kubenswrapper[4631]: I1204 17:53:38.865386 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"db0e53ae-f2fd-47d8-a073-694e7894bcb4","Type":"ContainerStarted","Data":"49f7fefbd0dbdc59c92c024cbe6234181851c1b9846c9df356f503cca0625a4c"}
Dec 04 17:53:38 crc kubenswrapper[4631]: I1204 17:53:38.889008 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.888990557 podStartE2EDuration="2.888990557s" podCreationTimestamp="2025-12-04 17:53:36 +0000 UTC" firstStartedPulling="0001-01-01
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:53:38.877954998 +0000 UTC m=+1548.910196996" watchObservedRunningTime="2025-12-04 17:53:38.888990557 +0000 UTC m=+1548.921232555" Dec 04 17:53:38 crc kubenswrapper[4631]: I1204 17:53:38.901728 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.9017000020000001 podStartE2EDuration="1.901700002s" podCreationTimestamp="2025-12-04 17:53:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:53:38.89681325 +0000 UTC m=+1548.929055248" watchObservedRunningTime="2025-12-04 17:53:38.901700002 +0000 UTC m=+1548.933941990" Dec 04 17:53:39 crc kubenswrapper[4631]: I1204 17:53:39.165571 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 04 17:53:39 crc kubenswrapper[4631]: I1204 17:53:39.165670 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 04 17:53:40 crc kubenswrapper[4631]: I1204 17:53:40.206346 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 04 17:53:42 crc kubenswrapper[4631]: I1204 17:53:42.253727 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 04 17:53:44 crc kubenswrapper[4631]: I1204 17:53:44.165432 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 04 17:53:44 crc kubenswrapper[4631]: I1204 17:53:44.166473 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 04 17:53:45 crc kubenswrapper[4631]: I1204 17:53:45.181539 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="abf757d5-6165-4cb3-9e40-b0eec2920b02" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 04 17:53:45 crc kubenswrapper[4631]: I1204 17:53:45.181617 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="abf757d5-6165-4cb3-9e40-b0eec2920b02" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 04 17:53:47 crc kubenswrapper[4631]: I1204 17:53:47.253595 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 04 17:53:47 crc kubenswrapper[4631]: I1204 17:53:47.281370 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 04 17:53:47 crc kubenswrapper[4631]: I1204 17:53:47.561806 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 04 17:53:47 crc kubenswrapper[4631]: I1204 17:53:47.561856 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 04 17:53:47 crc kubenswrapper[4631]: I1204 17:53:47.982152 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 04 17:53:48 crc kubenswrapper[4631]: I1204 17:53:48.644529 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" 
podUID="db0e53ae-f2fd-47d8-a073-694e7894bcb4" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 04 17:53:48 crc kubenswrapper[4631]: I1204 17:53:48.644533 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="db0e53ae-f2fd-47d8-a073-694e7894bcb4" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 04 17:53:49 crc kubenswrapper[4631]: I1204 17:53:49.239256 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" Dec 04 17:53:49 crc kubenswrapper[4631]: E1204 17:53:49.239632 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 17:53:54 crc kubenswrapper[4631]: I1204 17:53:54.170994 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 04 17:53:54 crc kubenswrapper[4631]: I1204 17:53:54.171929 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 04 17:53:54 crc kubenswrapper[4631]: I1204 17:53:54.176849 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 04 17:53:54 crc kubenswrapper[4631]: I1204 17:53:54.999654 4631 generic.go:334] "Generic (PLEG): container finished" podID="32ae7f6a-5058-401a-b9bb-f0e6cd188783" containerID="0efe7561e67485a29034a29fe861bcad2eae3a701c3aa722abb9d3b034f4d9ac" exitCode=137 Dec 04 17:53:54 crc kubenswrapper[4631]: I1204 17:53:54.999755 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"32ae7f6a-5058-401a-b9bb-f0e6cd188783","Type":"ContainerDied","Data":"0efe7561e67485a29034a29fe861bcad2eae3a701c3aa722abb9d3b034f4d9ac"} Dec 04 17:53:55 crc kubenswrapper[4631]: I1204 17:53:55.010642 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 04 17:53:55 crc kubenswrapper[4631]: I1204 17:53:55.519041 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:53:55 crc kubenswrapper[4631]: I1204 17:53:55.593453 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6tfs\" (UniqueName: \"kubernetes.io/projected/32ae7f6a-5058-401a-b9bb-f0e6cd188783-kube-api-access-n6tfs\") pod \"32ae7f6a-5058-401a-b9bb-f0e6cd188783\" (UID: \"32ae7f6a-5058-401a-b9bb-f0e6cd188783\") " Dec 04 17:53:55 crc kubenswrapper[4631]: I1204 17:53:55.593947 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32ae7f6a-5058-401a-b9bb-f0e6cd188783-combined-ca-bundle\") pod \"32ae7f6a-5058-401a-b9bb-f0e6cd188783\" (UID: \"32ae7f6a-5058-401a-b9bb-f0e6cd188783\") " Dec 04 17:53:55 crc kubenswrapper[4631]: I1204 17:53:55.594229 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32ae7f6a-5058-401a-b9bb-f0e6cd188783-config-data\") pod \"32ae7f6a-5058-401a-b9bb-f0e6cd188783\" (UID: \"32ae7f6a-5058-401a-b9bb-f0e6cd188783\") " Dec 04 17:53:55 crc kubenswrapper[4631]: I1204 17:53:55.602592 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32ae7f6a-5058-401a-b9bb-f0e6cd188783-kube-api-access-n6tfs" (OuterVolumeSpecName: "kube-api-access-n6tfs") pod "32ae7f6a-5058-401a-b9bb-f0e6cd188783" (UID: "32ae7f6a-5058-401a-b9bb-f0e6cd188783"). InnerVolumeSpecName "kube-api-access-n6tfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:53:55 crc kubenswrapper[4631]: I1204 17:53:55.625710 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32ae7f6a-5058-401a-b9bb-f0e6cd188783-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "32ae7f6a-5058-401a-b9bb-f0e6cd188783" (UID: "32ae7f6a-5058-401a-b9bb-f0e6cd188783"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:53:55 crc kubenswrapper[4631]: I1204 17:53:55.627067 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32ae7f6a-5058-401a-b9bb-f0e6cd188783-config-data" (OuterVolumeSpecName: "config-data") pod "32ae7f6a-5058-401a-b9bb-f0e6cd188783" (UID: "32ae7f6a-5058-401a-b9bb-f0e6cd188783"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:53:55 crc kubenswrapper[4631]: I1204 17:53:55.696152 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6tfs\" (UniqueName: \"kubernetes.io/projected/32ae7f6a-5058-401a-b9bb-f0e6cd188783-kube-api-access-n6tfs\") on node \"crc\" DevicePath \"\"" Dec 04 17:53:55 crc kubenswrapper[4631]: I1204 17:53:55.696398 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32ae7f6a-5058-401a-b9bb-f0e6cd188783-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:53:55 crc kubenswrapper[4631]: I1204 17:53:55.696463 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32ae7f6a-5058-401a-b9bb-f0e6cd188783-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.012014 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"32ae7f6a-5058-401a-b9bb-f0e6cd188783","Type":"ContainerDied","Data":"e526f1ea9c506f3169c20cc31b557a74920bfcc61f9add6f99b48650bef7c1c3"} Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.012066 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.012073 4631 scope.go:117] "RemoveContainer" containerID="0efe7561e67485a29034a29fe861bcad2eae3a701c3aa722abb9d3b034f4d9ac" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.056326 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.078904 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.092084 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 04 17:53:56 crc kubenswrapper[4631]: E1204 17:53:56.092565 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32ae7f6a-5058-401a-b9bb-f0e6cd188783" containerName="nova-cell1-novncproxy-novncproxy" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.092584 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="32ae7f6a-5058-401a-b9bb-f0e6cd188783" containerName="nova-cell1-novncproxy-novncproxy" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.092791 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="32ae7f6a-5058-401a-b9bb-f0e6cd188783" containerName="nova-cell1-novncproxy-novncproxy" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.093491 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.095577 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.095840 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.095942 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.102933 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.205437 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73bece06-ddcd-4bd7-9f77-1c7551dd5c10-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bece06-ddcd-4bd7-9f77-1c7551dd5c10\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.205567 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r56c2\" (UniqueName: \"kubernetes.io/projected/73bece06-ddcd-4bd7-9f77-1c7551dd5c10-kube-api-access-r56c2\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bece06-ddcd-4bd7-9f77-1c7551dd5c10\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.205603 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/73bece06-ddcd-4bd7-9f77-1c7551dd5c10-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bece06-ddcd-4bd7-9f77-1c7551dd5c10\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.205668 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73bece06-ddcd-4bd7-9f77-1c7551dd5c10-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bece06-ddcd-4bd7-9f77-1c7551dd5c10\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.205692 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/73bece06-ddcd-4bd7-9f77-1c7551dd5c10-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bece06-ddcd-4bd7-9f77-1c7551dd5c10\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.250199 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32ae7f6a-5058-401a-b9bb-f0e6cd188783" path="/var/lib/kubelet/pods/32ae7f6a-5058-401a-b9bb-f0e6cd188783/volumes" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.308156 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73bece06-ddcd-4bd7-9f77-1c7551dd5c10-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bece06-ddcd-4bd7-9f77-1c7551dd5c10\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.308308 4631 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/73bece06-ddcd-4bd7-9f77-1c7551dd5c10-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bece06-ddcd-4bd7-9f77-1c7551dd5c10\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.308452 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73bece06-ddcd-4bd7-9f77-1c7551dd5c10-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bece06-ddcd-4bd7-9f77-1c7551dd5c10\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.308562 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r56c2\" (UniqueName: \"kubernetes.io/projected/73bece06-ddcd-4bd7-9f77-1c7551dd5c10-kube-api-access-r56c2\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bece06-ddcd-4bd7-9f77-1c7551dd5c10\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.308605 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/73bece06-ddcd-4bd7-9f77-1c7551dd5c10-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bece06-ddcd-4bd7-9f77-1c7551dd5c10\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.313611 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/73bece06-ddcd-4bd7-9f77-1c7551dd5c10-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bece06-ddcd-4bd7-9f77-1c7551dd5c10\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.313873 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73bece06-ddcd-4bd7-9f77-1c7551dd5c10-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bece06-ddcd-4bd7-9f77-1c7551dd5c10\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.316043 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73bece06-ddcd-4bd7-9f77-1c7551dd5c10-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bece06-ddcd-4bd7-9f77-1c7551dd5c10\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.317292 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/73bece06-ddcd-4bd7-9f77-1c7551dd5c10-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bece06-ddcd-4bd7-9f77-1c7551dd5c10\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.326586 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r56c2\" (UniqueName: \"kubernetes.io/projected/73bece06-ddcd-4bd7-9f77-1c7551dd5c10-kube-api-access-r56c2\") pod \"nova-cell1-novncproxy-0\" (UID: \"73bece06-ddcd-4bd7-9f77-1c7551dd5c10\") " pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.421706 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:53:56 crc kubenswrapper[4631]: I1204 17:53:56.911295 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.023884 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"73bece06-ddcd-4bd7-9f77-1c7551dd5c10","Type":"ContainerStarted","Data":"0d655efa5f8864b9c9ba578edfc2c9689c482989a588c6cb59655bf51779b379"} Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.566126 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.566612 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.567195 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.567615 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.572164 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.587363 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.823521 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-p977t"] Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.827823 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.841916 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-p977t\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.841980 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-config\") pod \"dnsmasq-dns-59cf4bdb65-p977t\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.842083 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dvw4\" (UniqueName: \"kubernetes.io/projected/308eec4b-712a-4be1-af17-846432557cfb-kube-api-access-7dvw4\") pod \"dnsmasq-dns-59cf4bdb65-p977t\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.842135 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-p977t\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.842205 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-p977t\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.842273 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-p977t\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.849137 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-p977t"] Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.946590 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-p977t\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.946708 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-p977t\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.946746 4631 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-config\") pod \"dnsmasq-dns-59cf4bdb65-p977t\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.946789 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dvw4\" (UniqueName: \"kubernetes.io/projected/308eec4b-712a-4be1-af17-846432557cfb-kube-api-access-7dvw4\") pod \"dnsmasq-dns-59cf4bdb65-p977t\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.946812 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-p977t\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.946848 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-p977t\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.948403 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-p977t\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.948576 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-config\") pod \"dnsmasq-dns-59cf4bdb65-p977t\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.948586 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-p977t\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.949313 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-p977t\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.949330 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-p977t\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:53:57 crc kubenswrapper[4631]: I1204 17:53:57.968895 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dvw4\" (UniqueName: 
\"kubernetes.io/projected/308eec4b-712a-4be1-af17-846432557cfb-kube-api-access-7dvw4\") pod \"dnsmasq-dns-59cf4bdb65-p977t\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:53:58 crc kubenswrapper[4631]: I1204 17:53:58.038535 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"73bece06-ddcd-4bd7-9f77-1c7551dd5c10","Type":"ContainerStarted","Data":"e9e3a81f4ae9b34374ced952b5ff30f4588b2dbd8575af4d867733dc1b8d37ec"} Dec 04 17:53:58 crc kubenswrapper[4631]: I1204 17:53:58.148010 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:53:58 crc kubenswrapper[4631]: I1204 17:53:58.682006 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.6819898 podStartE2EDuration="2.6819898s" podCreationTimestamp="2025-12-04 17:53:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:53:58.060515452 +0000 UTC m=+1568.092757480" watchObservedRunningTime="2025-12-04 17:53:58.6819898 +0000 UTC m=+1568.714231798" Dec 04 17:53:58 crc kubenswrapper[4631]: I1204 17:53:58.688689 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-p977t"] Dec 04 17:53:59 crc kubenswrapper[4631]: I1204 17:53:59.049013 4631 generic.go:334] "Generic (PLEG): container finished" podID="308eec4b-712a-4be1-af17-846432557cfb" containerID="08377cf13fccbec161eadd9db0f1298e6eed7891df03115307a31df519942da3" exitCode=0 Dec 04 17:53:59 crc kubenswrapper[4631]: I1204 17:53:59.049057 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" event={"ID":"308eec4b-712a-4be1-af17-846432557cfb","Type":"ContainerDied","Data":"08377cf13fccbec161eadd9db0f1298e6eed7891df03115307a31df519942da3"} Dec 04 17:53:59 crc kubenswrapper[4631]: I1204 17:53:59.049360 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" event={"ID":"308eec4b-712a-4be1-af17-846432557cfb","Type":"ContainerStarted","Data":"30d5c96d86a3d22d7a4a43d6327163b4d189aaf9c4dca499baa744d2432e0f61"} Dec 04 17:54:00 crc kubenswrapper[4631]: I1204 17:54:00.060712 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" event={"ID":"308eec4b-712a-4be1-af17-846432557cfb","Type":"ContainerStarted","Data":"7438a7e0f1afc74a44acb14c1f75a6ca23208ca2a6646225a9f779cb3d9b0a4b"} Dec 04 17:54:00 crc kubenswrapper[4631]: I1204 17:54:00.060979 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:54:00 crc kubenswrapper[4631]: I1204 17:54:00.080564 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" podStartSLOduration=3.080548532 podStartE2EDuration="3.080548532s" podCreationTimestamp="2025-12-04 17:53:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:54:00.077305694 +0000 UTC m=+1570.109547692" watchObservedRunningTime="2025-12-04 17:54:00.080548532 +0000 UTC m=+1570.112790530" Dec 04 17:54:00 crc kubenswrapper[4631]: I1204 17:54:00.270165 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 04 17:54:00 crc 
kubenswrapper[4631]: I1204 17:54:00.270401 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="db0e53ae-f2fd-47d8-a073-694e7894bcb4" containerName="nova-api-log" containerID="cri-o://f29aaaa98fd50c41397ce6ef7ed3a792e21c1ded1594cc217bd6f1dabc4cf0db" gracePeriod=30 Dec 04 17:54:00 crc kubenswrapper[4631]: I1204 17:54:00.270679 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="db0e53ae-f2fd-47d8-a073-694e7894bcb4" containerName="nova-api-api" containerID="cri-o://4b3821d02895a992cd451dac29931706c05ee8d771772452cfbaeada7e433833" gracePeriod=30 Dec 04 17:54:00 crc kubenswrapper[4631]: I1204 17:54:00.615192 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:54:00 crc kubenswrapper[4631]: I1204 17:54:00.615753 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dce8f67a-41be-41df-b562-c3ed6bfa64f9" containerName="ceilometer-central-agent" containerID="cri-o://bc9878ef321b33e1587e2d83caae942a951d4649f1511d123b650ac7a4e7f99a" gracePeriod=30 Dec 04 17:54:00 crc kubenswrapper[4631]: I1204 17:54:00.615869 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dce8f67a-41be-41df-b562-c3ed6bfa64f9" containerName="ceilometer-notification-agent" containerID="cri-o://ac0ae94d97f0b99afc56faf0356cd55bcc89519d01482264c8fa0d43ea835a34" gracePeriod=30 Dec 04 17:54:00 crc kubenswrapper[4631]: I1204 17:54:00.615905 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dce8f67a-41be-41df-b562-c3ed6bfa64f9" containerName="sg-core" containerID="cri-o://87a1c288aa47405660efd06fd69bbaa58c9fefc897a3f57e59241674c01c15d3" gracePeriod=30 Dec 04 17:54:00 crc kubenswrapper[4631]: I1204 17:54:00.615937 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dce8f67a-41be-41df-b562-c3ed6bfa64f9" containerName="proxy-httpd" containerID="cri-o://37e893a066073a9be291a258223d90cbc5a147bc0b8e857175d4f0424b72d062" gracePeriod=30 Dec 04 17:54:01 crc kubenswrapper[4631]: I1204 17:54:01.074104 4631 generic.go:334] "Generic (PLEG): container finished" podID="dce8f67a-41be-41df-b562-c3ed6bfa64f9" containerID="37e893a066073a9be291a258223d90cbc5a147bc0b8e857175d4f0424b72d062" exitCode=0 Dec 04 17:54:01 crc kubenswrapper[4631]: I1204 17:54:01.074136 4631 generic.go:334] "Generic (PLEG): container finished" podID="dce8f67a-41be-41df-b562-c3ed6bfa64f9" containerID="87a1c288aa47405660efd06fd69bbaa58c9fefc897a3f57e59241674c01c15d3" exitCode=2 Dec 04 17:54:01 crc kubenswrapper[4631]: I1204 17:54:01.074188 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dce8f67a-41be-41df-b562-c3ed6bfa64f9","Type":"ContainerDied","Data":"37e893a066073a9be291a258223d90cbc5a147bc0b8e857175d4f0424b72d062"} Dec 04 17:54:01 crc kubenswrapper[4631]: I1204 17:54:01.074227 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dce8f67a-41be-41df-b562-c3ed6bfa64f9","Type":"ContainerDied","Data":"87a1c288aa47405660efd06fd69bbaa58c9fefc897a3f57e59241674c01c15d3"} Dec 04 17:54:01 crc kubenswrapper[4631]: I1204 17:54:01.077555 4631 generic.go:334] "Generic (PLEG): container finished" podID="db0e53ae-f2fd-47d8-a073-694e7894bcb4" 
containerID="f29aaaa98fd50c41397ce6ef7ed3a792e21c1ded1594cc217bd6f1dabc4cf0db" exitCode=143 Dec 04 17:54:01 crc kubenswrapper[4631]: I1204 17:54:01.077635 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"db0e53ae-f2fd-47d8-a073-694e7894bcb4","Type":"ContainerDied","Data":"f29aaaa98fd50c41397ce6ef7ed3a792e21c1ded1594cc217bd6f1dabc4cf0db"} Dec 04 17:54:01 crc kubenswrapper[4631]: I1204 17:54:01.422401 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:54:02 crc kubenswrapper[4631]: I1204 17:54:02.089521 4631 generic.go:334] "Generic (PLEG): container finished" podID="dce8f67a-41be-41df-b562-c3ed6bfa64f9" containerID="bc9878ef321b33e1587e2d83caae942a951d4649f1511d123b650ac7a4e7f99a" exitCode=0 Dec 04 17:54:02 crc kubenswrapper[4631]: I1204 17:54:02.089555 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dce8f67a-41be-41df-b562-c3ed6bfa64f9","Type":"ContainerDied","Data":"bc9878ef321b33e1587e2d83caae942a951d4649f1511d123b650ac7a4e7f99a"} Dec 04 17:54:02 crc kubenswrapper[4631]: I1204 17:54:02.239286 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" Dec 04 17:54:02 crc kubenswrapper[4631]: E1204 17:54:02.239683 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 17:54:03 crc kubenswrapper[4631]: I1204 17:54:03.729916 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 04 17:54:03 crc kubenswrapper[4631]: I1204 17:54:03.875168 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-ceilometer-tls-certs\") pod \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " Dec 04 17:54:03 crc kubenswrapper[4631]: I1204 17:54:03.875216 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dce8f67a-41be-41df-b562-c3ed6bfa64f9-run-httpd\") pod \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " Dec 04 17:54:03 crc kubenswrapper[4631]: I1204 17:54:03.875283 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-sg-core-conf-yaml\") pod \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " Dec 04 17:54:03 crc kubenswrapper[4631]: I1204 17:54:03.875308 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-scripts\") pod \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " Dec 04 17:54:03 crc kubenswrapper[4631]: I1204 17:54:03.875402 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6fp7\" (UniqueName: \"kubernetes.io/projected/dce8f67a-41be-41df-b562-c3ed6bfa64f9-kube-api-access-z6fp7\") pod \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " Dec 04 17:54:03 crc kubenswrapper[4631]: I1204 17:54:03.875421 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-config-data\") pod \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " Dec 04 17:54:03 crc kubenswrapper[4631]: I1204 17:54:03.875437 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dce8f67a-41be-41df-b562-c3ed6bfa64f9-log-httpd\") pod \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " Dec 04 17:54:03 crc kubenswrapper[4631]: I1204 17:54:03.875488 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-combined-ca-bundle\") pod \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\" (UID: \"dce8f67a-41be-41df-b562-c3ed6bfa64f9\") " Dec 04 17:54:03 crc kubenswrapper[4631]: I1204 17:54:03.878779 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dce8f67a-41be-41df-b562-c3ed6bfa64f9-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "dce8f67a-41be-41df-b562-c3ed6bfa64f9" (UID: "dce8f67a-41be-41df-b562-c3ed6bfa64f9"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:54:03 crc kubenswrapper[4631]: I1204 17:54:03.880539 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dce8f67a-41be-41df-b562-c3ed6bfa64f9-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "dce8f67a-41be-41df-b562-c3ed6bfa64f9" (UID: "dce8f67a-41be-41df-b562-c3ed6bfa64f9"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:54:03 crc kubenswrapper[4631]: I1204 17:54:03.884687 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-scripts" (OuterVolumeSpecName: "scripts") pod "dce8f67a-41be-41df-b562-c3ed6bfa64f9" (UID: "dce8f67a-41be-41df-b562-c3ed6bfa64f9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:54:03 crc kubenswrapper[4631]: I1204 17:54:03.888912 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dce8f67a-41be-41df-b562-c3ed6bfa64f9-kube-api-access-z6fp7" (OuterVolumeSpecName: "kube-api-access-z6fp7") pod "dce8f67a-41be-41df-b562-c3ed6bfa64f9" (UID: "dce8f67a-41be-41df-b562-c3ed6bfa64f9"). InnerVolumeSpecName "kube-api-access-z6fp7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:54:03 crc kubenswrapper[4631]: I1204 17:54:03.931526 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "dce8f67a-41be-41df-b562-c3ed6bfa64f9" (UID: "dce8f67a-41be-41df-b562-c3ed6bfa64f9"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:54:03 crc kubenswrapper[4631]: I1204 17:54:03.942653 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 04 17:54:03 crc kubenswrapper[4631]: I1204 17:54:03.977864 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6fp7\" (UniqueName: \"kubernetes.io/projected/dce8f67a-41be-41df-b562-c3ed6bfa64f9-kube-api-access-z6fp7\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:03 crc kubenswrapper[4631]: I1204 17:54:03.977907 4631 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dce8f67a-41be-41df-b562-c3ed6bfa64f9-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:03 crc kubenswrapper[4631]: I1204 17:54:03.977919 4631 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dce8f67a-41be-41df-b562-c3ed6bfa64f9-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:03 crc kubenswrapper[4631]: I1204 17:54:03.977929 4631 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:03 crc kubenswrapper[4631]: I1204 17:54:03.977940 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.005386 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dce8f67a-41be-41df-b562-c3ed6bfa64f9" (UID: "dce8f67a-41be-41df-b562-c3ed6bfa64f9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.012447 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-config-data" (OuterVolumeSpecName: "config-data") pod "dce8f67a-41be-41df-b562-c3ed6bfa64f9" (UID: "dce8f67a-41be-41df-b562-c3ed6bfa64f9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.012666 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "dce8f67a-41be-41df-b562-c3ed6bfa64f9" (UID: "dce8f67a-41be-41df-b562-c3ed6bfa64f9"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.079975 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db0e53ae-f2fd-47d8-a073-694e7894bcb4-logs\") pod \"db0e53ae-f2fd-47d8-a073-694e7894bcb4\" (UID: \"db0e53ae-f2fd-47d8-a073-694e7894bcb4\") " Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.080080 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db0e53ae-f2fd-47d8-a073-694e7894bcb4-config-data\") pod \"db0e53ae-f2fd-47d8-a073-694e7894bcb4\" (UID: \"db0e53ae-f2fd-47d8-a073-694e7894bcb4\") " Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.080242 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gk8sz\" (UniqueName: \"kubernetes.io/projected/db0e53ae-f2fd-47d8-a073-694e7894bcb4-kube-api-access-gk8sz\") pod \"db0e53ae-f2fd-47d8-a073-694e7894bcb4\" (UID: \"db0e53ae-f2fd-47d8-a073-694e7894bcb4\") " Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.080576 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db0e53ae-f2fd-47d8-a073-694e7894bcb4-combined-ca-bundle\") pod \"db0e53ae-f2fd-47d8-a073-694e7894bcb4\" (UID: \"db0e53ae-f2fd-47d8-a073-694e7894bcb4\") " Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.080861 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db0e53ae-f2fd-47d8-a073-694e7894bcb4-logs" (OuterVolumeSpecName: "logs") pod "db0e53ae-f2fd-47d8-a073-694e7894bcb4" (UID: "db0e53ae-f2fd-47d8-a073-694e7894bcb4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.081222 4631 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.081240 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.081248 4631 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db0e53ae-f2fd-47d8-a073-694e7894bcb4-logs\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.081257 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dce8f67a-41be-41df-b562-c3ed6bfa64f9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.084803 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db0e53ae-f2fd-47d8-a073-694e7894bcb4-kube-api-access-gk8sz" (OuterVolumeSpecName: "kube-api-access-gk8sz") pod "db0e53ae-f2fd-47d8-a073-694e7894bcb4" (UID: "db0e53ae-f2fd-47d8-a073-694e7894bcb4"). InnerVolumeSpecName "kube-api-access-gk8sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.127900 4631 generic.go:334] "Generic (PLEG): container finished" podID="dce8f67a-41be-41df-b562-c3ed6bfa64f9" containerID="ac0ae94d97f0b99afc56faf0356cd55bcc89519d01482264c8fa0d43ea835a34" exitCode=0 Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.127962 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dce8f67a-41be-41df-b562-c3ed6bfa64f9","Type":"ContainerDied","Data":"ac0ae94d97f0b99afc56faf0356cd55bcc89519d01482264c8fa0d43ea835a34"} Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.128007 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dce8f67a-41be-41df-b562-c3ed6bfa64f9","Type":"ContainerDied","Data":"4e0a31b399d7605024711913e76cf1740d97b53da5485164b598578da7cd9b8a"} Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.128024 4631 scope.go:117] "RemoveContainer" containerID="37e893a066073a9be291a258223d90cbc5a147bc0b8e857175d4f0424b72d062" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.128171 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.159012 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db0e53ae-f2fd-47d8-a073-694e7894bcb4-config-data" (OuterVolumeSpecName: "config-data") pod "db0e53ae-f2fd-47d8-a073-694e7894bcb4" (UID: "db0e53ae-f2fd-47d8-a073-694e7894bcb4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.159542 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db0e53ae-f2fd-47d8-a073-694e7894bcb4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "db0e53ae-f2fd-47d8-a073-694e7894bcb4" (UID: "db0e53ae-f2fd-47d8-a073-694e7894bcb4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.160478 4631 generic.go:334] "Generic (PLEG): container finished" podID="db0e53ae-f2fd-47d8-a073-694e7894bcb4" containerID="4b3821d02895a992cd451dac29931706c05ee8d771772452cfbaeada7e433833" exitCode=0 Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.160531 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"db0e53ae-f2fd-47d8-a073-694e7894bcb4","Type":"ContainerDied","Data":"4b3821d02895a992cd451dac29931706c05ee8d771772452cfbaeada7e433833"} Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.160563 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"db0e53ae-f2fd-47d8-a073-694e7894bcb4","Type":"ContainerDied","Data":"49f7fefbd0dbdc59c92c024cbe6234181851c1b9846c9df356f503cca0625a4c"} Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.160634 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.182760 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gk8sz\" (UniqueName: \"kubernetes.io/projected/db0e53ae-f2fd-47d8-a073-694e7894bcb4-kube-api-access-gk8sz\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.182786 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db0e53ae-f2fd-47d8-a073-694e7894bcb4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.182795 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db0e53ae-f2fd-47d8-a073-694e7894bcb4-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.260568 4631 scope.go:117] "RemoveContainer" containerID="87a1c288aa47405660efd06fd69bbaa58c9fefc897a3f57e59241674c01c15d3" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.288782 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.297600 4631 scope.go:117] "RemoveContainer" containerID="ac0ae94d97f0b99afc56faf0356cd55bcc89519d01482264c8fa0d43ea835a34" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.303841 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.327043 4631 scope.go:117] "RemoveContainer" containerID="bc9878ef321b33e1587e2d83caae942a951d4649f1511d123b650ac7a4e7f99a" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.329589 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.346460 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.364669 4631 scope.go:117] "RemoveContainer" containerID="37e893a066073a9be291a258223d90cbc5a147bc0b8e857175d4f0424b72d062" Dec 04 17:54:04 crc kubenswrapper[4631]: E1204 17:54:04.365165 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37e893a066073a9be291a258223d90cbc5a147bc0b8e857175d4f0424b72d062\": container with ID starting with 37e893a066073a9be291a258223d90cbc5a147bc0b8e857175d4f0424b72d062 not found: ID does not exist" containerID="37e893a066073a9be291a258223d90cbc5a147bc0b8e857175d4f0424b72d062" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.365202 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37e893a066073a9be291a258223d90cbc5a147bc0b8e857175d4f0424b72d062"} err="failed to get container status \"37e893a066073a9be291a258223d90cbc5a147bc0b8e857175d4f0424b72d062\": rpc error: code = NotFound desc = could not find container \"37e893a066073a9be291a258223d90cbc5a147bc0b8e857175d4f0424b72d062\": container with ID starting with 37e893a066073a9be291a258223d90cbc5a147bc0b8e857175d4f0424b72d062 not found: ID does not exist" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.365227 4631 scope.go:117] "RemoveContainer" containerID="87a1c288aa47405660efd06fd69bbaa58c9fefc897a3f57e59241674c01c15d3" Dec 04 17:54:04 crc kubenswrapper[4631]: E1204 17:54:04.365465 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = 
NotFound desc = could not find container \"87a1c288aa47405660efd06fd69bbaa58c9fefc897a3f57e59241674c01c15d3\": container with ID starting with 87a1c288aa47405660efd06fd69bbaa58c9fefc897a3f57e59241674c01c15d3 not found: ID does not exist" containerID="87a1c288aa47405660efd06fd69bbaa58c9fefc897a3f57e59241674c01c15d3" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.365493 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87a1c288aa47405660efd06fd69bbaa58c9fefc897a3f57e59241674c01c15d3"} err="failed to get container status \"87a1c288aa47405660efd06fd69bbaa58c9fefc897a3f57e59241674c01c15d3\": rpc error: code = NotFound desc = could not find container \"87a1c288aa47405660efd06fd69bbaa58c9fefc897a3f57e59241674c01c15d3\": container with ID starting with 87a1c288aa47405660efd06fd69bbaa58c9fefc897a3f57e59241674c01c15d3 not found: ID does not exist" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.365510 4631 scope.go:117] "RemoveContainer" containerID="ac0ae94d97f0b99afc56faf0356cd55bcc89519d01482264c8fa0d43ea835a34" Dec 04 17:54:04 crc kubenswrapper[4631]: E1204 17:54:04.365722 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac0ae94d97f0b99afc56faf0356cd55bcc89519d01482264c8fa0d43ea835a34\": container with ID starting with ac0ae94d97f0b99afc56faf0356cd55bcc89519d01482264c8fa0d43ea835a34 not found: ID does not exist" containerID="ac0ae94d97f0b99afc56faf0356cd55bcc89519d01482264c8fa0d43ea835a34" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.365750 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac0ae94d97f0b99afc56faf0356cd55bcc89519d01482264c8fa0d43ea835a34"} err="failed to get container status \"ac0ae94d97f0b99afc56faf0356cd55bcc89519d01482264c8fa0d43ea835a34\": rpc error: code = NotFound desc = could not find container \"ac0ae94d97f0b99afc56faf0356cd55bcc89519d01482264c8fa0d43ea835a34\": container with ID starting with ac0ae94d97f0b99afc56faf0356cd55bcc89519d01482264c8fa0d43ea835a34 not found: ID does not exist" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.365768 4631 scope.go:117] "RemoveContainer" containerID="bc9878ef321b33e1587e2d83caae942a951d4649f1511d123b650ac7a4e7f99a" Dec 04 17:54:04 crc kubenswrapper[4631]: E1204 17:54:04.365966 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc9878ef321b33e1587e2d83caae942a951d4649f1511d123b650ac7a4e7f99a\": container with ID starting with bc9878ef321b33e1587e2d83caae942a951d4649f1511d123b650ac7a4e7f99a not found: ID does not exist" containerID="bc9878ef321b33e1587e2d83caae942a951d4649f1511d123b650ac7a4e7f99a" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.365993 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc9878ef321b33e1587e2d83caae942a951d4649f1511d123b650ac7a4e7f99a"} err="failed to get container status \"bc9878ef321b33e1587e2d83caae942a951d4649f1511d123b650ac7a4e7f99a\": rpc error: code = NotFound desc = could not find container \"bc9878ef321b33e1587e2d83caae942a951d4649f1511d123b650ac7a4e7f99a\": container with ID starting with bc9878ef321b33e1587e2d83caae942a951d4649f1511d123b650ac7a4e7f99a not found: ID does not exist" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.366013 4631 scope.go:117] "RemoveContainer" 
containerID="4b3821d02895a992cd451dac29931706c05ee8d771772452cfbaeada7e433833" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.367438 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 04 17:54:04 crc kubenswrapper[4631]: E1204 17:54:04.367885 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dce8f67a-41be-41df-b562-c3ed6bfa64f9" containerName="ceilometer-central-agent" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.367903 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="dce8f67a-41be-41df-b562-c3ed6bfa64f9" containerName="ceilometer-central-agent" Dec 04 17:54:04 crc kubenswrapper[4631]: E1204 17:54:04.367923 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dce8f67a-41be-41df-b562-c3ed6bfa64f9" containerName="ceilometer-notification-agent" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.367930 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="dce8f67a-41be-41df-b562-c3ed6bfa64f9" containerName="ceilometer-notification-agent" Dec 04 17:54:04 crc kubenswrapper[4631]: E1204 17:54:04.367943 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dce8f67a-41be-41df-b562-c3ed6bfa64f9" containerName="sg-core" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.367950 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="dce8f67a-41be-41df-b562-c3ed6bfa64f9" containerName="sg-core" Dec 04 17:54:04 crc kubenswrapper[4631]: E1204 17:54:04.367968 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dce8f67a-41be-41df-b562-c3ed6bfa64f9" containerName="proxy-httpd" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.367974 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="dce8f67a-41be-41df-b562-c3ed6bfa64f9" containerName="proxy-httpd" Dec 04 17:54:04 crc kubenswrapper[4631]: E1204 17:54:04.367982 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db0e53ae-f2fd-47d8-a073-694e7894bcb4" containerName="nova-api-api" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.367988 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="db0e53ae-f2fd-47d8-a073-694e7894bcb4" containerName="nova-api-api" Dec 04 17:54:04 crc kubenswrapper[4631]: E1204 17:54:04.368004 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db0e53ae-f2fd-47d8-a073-694e7894bcb4" containerName="nova-api-log" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.368011 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="db0e53ae-f2fd-47d8-a073-694e7894bcb4" containerName="nova-api-log" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.368178 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="dce8f67a-41be-41df-b562-c3ed6bfa64f9" containerName="proxy-httpd" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.368189 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="dce8f67a-41be-41df-b562-c3ed6bfa64f9" containerName="ceilometer-central-agent" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.368207 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="dce8f67a-41be-41df-b562-c3ed6bfa64f9" containerName="ceilometer-notification-agent" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.368217 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="db0e53ae-f2fd-47d8-a073-694e7894bcb4" containerName="nova-api-log" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.368227 4631 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="db0e53ae-f2fd-47d8-a073-694e7894bcb4" containerName="nova-api-api" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.368237 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="dce8f67a-41be-41df-b562-c3ed6bfa64f9" containerName="sg-core" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.369243 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.376073 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.376364 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.376503 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.387761 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.405573 4631 scope.go:117] "RemoveContainer" containerID="f29aaaa98fd50c41397ce6ef7ed3a792e21c1ded1594cc217bd6f1dabc4cf0db" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.406666 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.414299 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.424895 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.425146 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.425260 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.433772 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.454502 4631 scope.go:117] "RemoveContainer" containerID="4b3821d02895a992cd451dac29931706c05ee8d771772452cfbaeada7e433833" Dec 04 17:54:04 crc kubenswrapper[4631]: E1204 17:54:04.455003 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b3821d02895a992cd451dac29931706c05ee8d771772452cfbaeada7e433833\": container with ID starting with 4b3821d02895a992cd451dac29931706c05ee8d771772452cfbaeada7e433833 not found: ID does not exist" containerID="4b3821d02895a992cd451dac29931706c05ee8d771772452cfbaeada7e433833" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.455056 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b3821d02895a992cd451dac29931706c05ee8d771772452cfbaeada7e433833"} err="failed to get container status \"4b3821d02895a992cd451dac29931706c05ee8d771772452cfbaeada7e433833\": rpc error: code = NotFound desc = could not find container \"4b3821d02895a992cd451dac29931706c05ee8d771772452cfbaeada7e433833\": container with ID starting with 4b3821d02895a992cd451dac29931706c05ee8d771772452cfbaeada7e433833 not found: ID does not exist" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.455089 4631 
scope.go:117] "RemoveContainer" containerID="f29aaaa98fd50c41397ce6ef7ed3a792e21c1ded1594cc217bd6f1dabc4cf0db" Dec 04 17:54:04 crc kubenswrapper[4631]: E1204 17:54:04.455446 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f29aaaa98fd50c41397ce6ef7ed3a792e21c1ded1594cc217bd6f1dabc4cf0db\": container with ID starting with f29aaaa98fd50c41397ce6ef7ed3a792e21c1ded1594cc217bd6f1dabc4cf0db not found: ID does not exist" containerID="f29aaaa98fd50c41397ce6ef7ed3a792e21c1ded1594cc217bd6f1dabc4cf0db" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.455484 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f29aaaa98fd50c41397ce6ef7ed3a792e21c1ded1594cc217bd6f1dabc4cf0db"} err="failed to get container status \"f29aaaa98fd50c41397ce6ef7ed3a792e21c1ded1594cc217bd6f1dabc4cf0db\": rpc error: code = NotFound desc = could not find container \"f29aaaa98fd50c41397ce6ef7ed3a792e21c1ded1594cc217bd6f1dabc4cf0db\": container with ID starting with f29aaaa98fd50c41397ce6ef7ed3a792e21c1ded1594cc217bd6f1dabc4cf0db not found: ID does not exist" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.489097 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ada8ef25-3d55-472a-aedd-ec8af72558ad-logs\") pod \"nova-api-0\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " pod="openstack/nova-api-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.489169 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " pod="openstack/nova-api-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.489261 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4871a6a-2cea-402a-9dfe-e72887258bb5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.489286 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4871a6a-2cea-402a-9dfe-e72887258bb5-scripts\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.490079 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-public-tls-certs\") pod \"nova-api-0\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " pod="openstack/nova-api-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.490124 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b4871a6a-2cea-402a-9dfe-e72887258bb5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.490148 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-7n9gq\" (UniqueName: \"kubernetes.io/projected/b4871a6a-2cea-402a-9dfe-e72887258bb5-kube-api-access-7n9gq\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.490202 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4871a6a-2cea-402a-9dfe-e72887258bb5-config-data\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.490275 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4871a6a-2cea-402a-9dfe-e72887258bb5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.490346 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lgpp\" (UniqueName: \"kubernetes.io/projected/ada8ef25-3d55-472a-aedd-ec8af72558ad-kube-api-access-6lgpp\") pod \"nova-api-0\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " pod="openstack/nova-api-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.490428 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-internal-tls-certs\") pod \"nova-api-0\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " pod="openstack/nova-api-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.490456 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-config-data\") pod \"nova-api-0\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " pod="openstack/nova-api-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.490544 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4871a6a-2cea-402a-9dfe-e72887258bb5-log-httpd\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.490695 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4871a6a-2cea-402a-9dfe-e72887258bb5-run-httpd\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.592712 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " pod="openstack/nova-api-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.592823 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4871a6a-2cea-402a-9dfe-e72887258bb5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " 
pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.592852 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4871a6a-2cea-402a-9dfe-e72887258bb5-scripts\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.592890 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-public-tls-certs\") pod \"nova-api-0\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " pod="openstack/nova-api-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.592908 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b4871a6a-2cea-402a-9dfe-e72887258bb5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.592931 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7n9gq\" (UniqueName: \"kubernetes.io/projected/b4871a6a-2cea-402a-9dfe-e72887258bb5-kube-api-access-7n9gq\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.592971 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4871a6a-2cea-402a-9dfe-e72887258bb5-config-data\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.592992 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4871a6a-2cea-402a-9dfe-e72887258bb5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.593022 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lgpp\" (UniqueName: \"kubernetes.io/projected/ada8ef25-3d55-472a-aedd-ec8af72558ad-kube-api-access-6lgpp\") pod \"nova-api-0\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " pod="openstack/nova-api-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.593049 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-internal-tls-certs\") pod \"nova-api-0\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " pod="openstack/nova-api-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.593067 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-config-data\") pod \"nova-api-0\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " pod="openstack/nova-api-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.593098 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4871a6a-2cea-402a-9dfe-e72887258bb5-log-httpd\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " 
pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.593138 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4871a6a-2cea-402a-9dfe-e72887258bb5-run-httpd\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.593178 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ada8ef25-3d55-472a-aedd-ec8af72558ad-logs\") pod \"nova-api-0\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " pod="openstack/nova-api-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.593622 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ada8ef25-3d55-472a-aedd-ec8af72558ad-logs\") pod \"nova-api-0\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " pod="openstack/nova-api-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.593975 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4871a6a-2cea-402a-9dfe-e72887258bb5-log-httpd\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.594233 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4871a6a-2cea-402a-9dfe-e72887258bb5-run-httpd\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.597911 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-internal-tls-certs\") pod \"nova-api-0\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " pod="openstack/nova-api-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.598789 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-config-data\") pod \"nova-api-0\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " pod="openstack/nova-api-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.598832 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4871a6a-2cea-402a-9dfe-e72887258bb5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.599356 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4871a6a-2cea-402a-9dfe-e72887258bb5-scripts\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.599993 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4871a6a-2cea-402a-9dfe-e72887258bb5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.601426 4631 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4871a6a-2cea-402a-9dfe-e72887258bb5-config-data\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.616141 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b4871a6a-2cea-402a-9dfe-e72887258bb5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.618065 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " pod="openstack/nova-api-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.619141 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-public-tls-certs\") pod \"nova-api-0\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " pod="openstack/nova-api-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.621739 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lgpp\" (UniqueName: \"kubernetes.io/projected/ada8ef25-3d55-472a-aedd-ec8af72558ad-kube-api-access-6lgpp\") pod \"nova-api-0\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " pod="openstack/nova-api-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.621865 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7n9gq\" (UniqueName: \"kubernetes.io/projected/b4871a6a-2cea-402a-9dfe-e72887258bb5-kube-api-access-7n9gq\") pod \"ceilometer-0\" (UID: \"b4871a6a-2cea-402a-9dfe-e72887258bb5\") " pod="openstack/ceilometer-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.691969 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 04 17:54:04 crc kubenswrapper[4631]: I1204 17:54:04.749922 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 04 17:54:05 crc kubenswrapper[4631]: I1204 17:54:05.214239 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 04 17:54:05 crc kubenswrapper[4631]: I1204 17:54:05.343280 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 04 17:54:06 crc kubenswrapper[4631]: I1204 17:54:06.186407 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ada8ef25-3d55-472a-aedd-ec8af72558ad","Type":"ContainerStarted","Data":"4423c875f1164156c01d4e39a715869b73180df4c717ca6bb719eb3bf5cffcd6"} Dec 04 17:54:06 crc kubenswrapper[4631]: I1204 17:54:06.186740 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ada8ef25-3d55-472a-aedd-ec8af72558ad","Type":"ContainerStarted","Data":"6b1ef5d8b0d8d05d2cf7fb5fb279f4b6e2bf0fec0c0bfad15bfb0a78ba7d4a00"} Dec 04 17:54:06 crc kubenswrapper[4631]: I1204 17:54:06.186756 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ada8ef25-3d55-472a-aedd-ec8af72558ad","Type":"ContainerStarted","Data":"a8db2dbfa9623068b630a772bc4dbad3172e9a01043a16ab0ce0673a9f8e77e1"} Dec 04 17:54:06 crc kubenswrapper[4631]: I1204 17:54:06.190111 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4871a6a-2cea-402a-9dfe-e72887258bb5","Type":"ContainerStarted","Data":"f7709d250001e64fcbf5176a2b834cb041f7fc809b213b90c3ac73c842e978a7"} Dec 04 17:54:06 crc kubenswrapper[4631]: I1204 17:54:06.211319 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.21129912 podStartE2EDuration="2.21129912s" podCreationTimestamp="2025-12-04 17:54:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:54:06.203523889 +0000 UTC m=+1576.235765887" watchObservedRunningTime="2025-12-04 17:54:06.21129912 +0000 UTC m=+1576.243541128" Dec 04 17:54:06 crc kubenswrapper[4631]: I1204 17:54:06.250393 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db0e53ae-f2fd-47d8-a073-694e7894bcb4" path="/var/lib/kubelet/pods/db0e53ae-f2fd-47d8-a073-694e7894bcb4/volumes" Dec 04 17:54:06 crc kubenswrapper[4631]: I1204 17:54:06.251197 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dce8f67a-41be-41df-b562-c3ed6bfa64f9" path="/var/lib/kubelet/pods/dce8f67a-41be-41df-b562-c3ed6bfa64f9/volumes" Dec 04 17:54:06 crc kubenswrapper[4631]: I1204 17:54:06.452472 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:54:06 crc kubenswrapper[4631]: I1204 17:54:06.553922 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:54:07 crc kubenswrapper[4631]: I1204 17:54:07.199126 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4871a6a-2cea-402a-9dfe-e72887258bb5","Type":"ContainerStarted","Data":"7c7a09a1d2d55c43d03584b318858d7431eab1d145bdd5dd9660d30d83e00d19"} Dec 04 17:54:07 crc kubenswrapper[4631]: I1204 17:54:07.215055 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 04 17:54:07 crc kubenswrapper[4631]: I1204 17:54:07.370497 4631 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/nova-cell1-cell-mapping-slhqd"] Dec 04 17:54:07 crc kubenswrapper[4631]: I1204 17:54:07.373225 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-slhqd" Dec 04 17:54:07 crc kubenswrapper[4631]: I1204 17:54:07.381300 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 04 17:54:07 crc kubenswrapper[4631]: I1204 17:54:07.382170 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 04 17:54:07 crc kubenswrapper[4631]: I1204 17:54:07.383973 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-slhqd"] Dec 04 17:54:07 crc kubenswrapper[4631]: I1204 17:54:07.479651 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-config-data\") pod \"nova-cell1-cell-mapping-slhqd\" (UID: \"10901c62-65f7-43ed-ab1e-93c30cb5b5f9\") " pod="openstack/nova-cell1-cell-mapping-slhqd" Dec 04 17:54:07 crc kubenswrapper[4631]: I1204 17:54:07.479790 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-scripts\") pod \"nova-cell1-cell-mapping-slhqd\" (UID: \"10901c62-65f7-43ed-ab1e-93c30cb5b5f9\") " pod="openstack/nova-cell1-cell-mapping-slhqd" Dec 04 17:54:07 crc kubenswrapper[4631]: I1204 17:54:07.479850 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-slhqd\" (UID: \"10901c62-65f7-43ed-ab1e-93c30cb5b5f9\") " pod="openstack/nova-cell1-cell-mapping-slhqd" Dec 04 17:54:07 crc kubenswrapper[4631]: I1204 17:54:07.479899 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ff4rg\" (UniqueName: \"kubernetes.io/projected/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-kube-api-access-ff4rg\") pod \"nova-cell1-cell-mapping-slhqd\" (UID: \"10901c62-65f7-43ed-ab1e-93c30cb5b5f9\") " pod="openstack/nova-cell1-cell-mapping-slhqd" Dec 04 17:54:07 crc kubenswrapper[4631]: I1204 17:54:07.581607 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ff4rg\" (UniqueName: \"kubernetes.io/projected/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-kube-api-access-ff4rg\") pod \"nova-cell1-cell-mapping-slhqd\" (UID: \"10901c62-65f7-43ed-ab1e-93c30cb5b5f9\") " pod="openstack/nova-cell1-cell-mapping-slhqd" Dec 04 17:54:07 crc kubenswrapper[4631]: I1204 17:54:07.581831 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-config-data\") pod \"nova-cell1-cell-mapping-slhqd\" (UID: \"10901c62-65f7-43ed-ab1e-93c30cb5b5f9\") " pod="openstack/nova-cell1-cell-mapping-slhqd" Dec 04 17:54:07 crc kubenswrapper[4631]: I1204 17:54:07.582696 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-scripts\") pod \"nova-cell1-cell-mapping-slhqd\" (UID: \"10901c62-65f7-43ed-ab1e-93c30cb5b5f9\") " pod="openstack/nova-cell1-cell-mapping-slhqd" Dec 04 17:54:07 
crc kubenswrapper[4631]: I1204 17:54:07.582838 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-slhqd\" (UID: \"10901c62-65f7-43ed-ab1e-93c30cb5b5f9\") " pod="openstack/nova-cell1-cell-mapping-slhqd" Dec 04 17:54:07 crc kubenswrapper[4631]: I1204 17:54:07.587913 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-scripts\") pod \"nova-cell1-cell-mapping-slhqd\" (UID: \"10901c62-65f7-43ed-ab1e-93c30cb5b5f9\") " pod="openstack/nova-cell1-cell-mapping-slhqd" Dec 04 17:54:07 crc kubenswrapper[4631]: I1204 17:54:07.588152 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-config-data\") pod \"nova-cell1-cell-mapping-slhqd\" (UID: \"10901c62-65f7-43ed-ab1e-93c30cb5b5f9\") " pod="openstack/nova-cell1-cell-mapping-slhqd" Dec 04 17:54:07 crc kubenswrapper[4631]: I1204 17:54:07.602756 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-slhqd\" (UID: \"10901c62-65f7-43ed-ab1e-93c30cb5b5f9\") " pod="openstack/nova-cell1-cell-mapping-slhqd" Dec 04 17:54:07 crc kubenswrapper[4631]: I1204 17:54:07.603564 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ff4rg\" (UniqueName: \"kubernetes.io/projected/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-kube-api-access-ff4rg\") pod \"nova-cell1-cell-mapping-slhqd\" (UID: \"10901c62-65f7-43ed-ab1e-93c30cb5b5f9\") " pod="openstack/nova-cell1-cell-mapping-slhqd" Dec 04 17:54:07 crc kubenswrapper[4631]: I1204 17:54:07.696344 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-slhqd" Dec 04 17:54:08 crc kubenswrapper[4631]: I1204 17:54:08.149507 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:54:08 crc kubenswrapper[4631]: I1204 17:54:08.246917 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-tcd4d"] Dec 04 17:54:08 crc kubenswrapper[4631]: I1204 17:54:08.247129 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d" podUID="bc96d3f1-aae9-4aac-bef8-c745f17a1ffb" containerName="dnsmasq-dns" containerID="cri-o://746d57cf5231329cd3a205b43e21fa72819c4012dd19093bf303f4acb6845144" gracePeriod=10 Dec 04 17:54:08 crc kubenswrapper[4631]: I1204 17:54:08.343327 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d" podUID="bc96d3f1-aae9-4aac-bef8-c745f17a1ffb" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.190:5353: connect: connection refused" Dec 04 17:54:08 crc kubenswrapper[4631]: I1204 17:54:08.453094 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-slhqd"] Dec 04 17:54:08 crc kubenswrapper[4631]: I1204 17:54:08.896458 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d" Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.023146 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-ovsdbserver-nb\") pod \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.023499 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-dns-swift-storage-0\") pod \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.023540 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-ovsdbserver-sb\") pod \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.023667 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zccmx\" (UniqueName: \"kubernetes.io/projected/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-kube-api-access-zccmx\") pod \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.023700 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-dns-svc\") pod \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.023716 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-config\") pod \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\" (UID: \"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb\") " Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.031350 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-kube-api-access-zccmx" (OuterVolumeSpecName: "kube-api-access-zccmx") pod "bc96d3f1-aae9-4aac-bef8-c745f17a1ffb" (UID: "bc96d3f1-aae9-4aac-bef8-c745f17a1ffb"). InnerVolumeSpecName "kube-api-access-zccmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.125394 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zccmx\" (UniqueName: \"kubernetes.io/projected/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-kube-api-access-zccmx\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.158241 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bc96d3f1-aae9-4aac-bef8-c745f17a1ffb" (UID: "bc96d3f1-aae9-4aac-bef8-c745f17a1ffb"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.186388 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "bc96d3f1-aae9-4aac-bef8-c745f17a1ffb" (UID: "bc96d3f1-aae9-4aac-bef8-c745f17a1ffb"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.191746 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-config" (OuterVolumeSpecName: "config") pod "bc96d3f1-aae9-4aac-bef8-c745f17a1ffb" (UID: "bc96d3f1-aae9-4aac-bef8-c745f17a1ffb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.208211 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bc96d3f1-aae9-4aac-bef8-c745f17a1ffb" (UID: "bc96d3f1-aae9-4aac-bef8-c745f17a1ffb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.213573 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bc96d3f1-aae9-4aac-bef8-c745f17a1ffb" (UID: "bc96d3f1-aae9-4aac-bef8-c745f17a1ffb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.227627 4631 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.227658 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-config\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.227668 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.227678 4631 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.227687 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.256043 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4871a6a-2cea-402a-9dfe-e72887258bb5","Type":"ContainerStarted","Data":"6d32dd01fac970fe17585e24a02f3213d1b2e18a087264d675e628c1f92a78ce"} Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.261600 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell1-cell-mapping-slhqd" event={"ID":"10901c62-65f7-43ed-ab1e-93c30cb5b5f9","Type":"ContainerStarted","Data":"987727ba72f5119a2838995bf36f03f672f805ba31748431130dba383bbb0ef1"} Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.261647 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-slhqd" event={"ID":"10901c62-65f7-43ed-ab1e-93c30cb5b5f9","Type":"ContainerStarted","Data":"b10848cc4afd644c86cd024564c14e95f3ce3a1bc3b5ac2d1fba17cd04c719fb"} Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.271428 4631 generic.go:334] "Generic (PLEG): container finished" podID="bc96d3f1-aae9-4aac-bef8-c745f17a1ffb" containerID="746d57cf5231329cd3a205b43e21fa72819c4012dd19093bf303f4acb6845144" exitCode=0 Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.271469 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d" event={"ID":"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb","Type":"ContainerDied","Data":"746d57cf5231329cd3a205b43e21fa72819c4012dd19093bf303f4acb6845144"} Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.271495 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d" event={"ID":"bc96d3f1-aae9-4aac-bef8-c745f17a1ffb","Type":"ContainerDied","Data":"abebaa2f4046e51c9e193d31582a92889625e1a9072287929e08d11950b50347"} Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.271514 4631 scope.go:117] "RemoveContainer" containerID="746d57cf5231329cd3a205b43e21fa72819c4012dd19093bf303f4acb6845144" Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.271632 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-tcd4d" Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.287190 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-slhqd" podStartSLOduration=2.287166288 podStartE2EDuration="2.287166288s" podCreationTimestamp="2025-12-04 17:54:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:54:09.276309703 +0000 UTC m=+1579.308551691" watchObservedRunningTime="2025-12-04 17:54:09.287166288 +0000 UTC m=+1579.319408286" Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.314415 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-tcd4d"] Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.316678 4631 scope.go:117] "RemoveContainer" containerID="1f4e202f91c152fafe1f3d6a072fe5384da653e8742304ecbc24edef73f1933b" Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.325873 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-tcd4d"] Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.351991 4631 scope.go:117] "RemoveContainer" containerID="746d57cf5231329cd3a205b43e21fa72819c4012dd19093bf303f4acb6845144" Dec 04 17:54:09 crc kubenswrapper[4631]: E1204 17:54:09.352476 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"746d57cf5231329cd3a205b43e21fa72819c4012dd19093bf303f4acb6845144\": container with ID starting with 746d57cf5231329cd3a205b43e21fa72819c4012dd19093bf303f4acb6845144 not found: ID does not exist" containerID="746d57cf5231329cd3a205b43e21fa72819c4012dd19093bf303f4acb6845144" Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.352522 4631 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"746d57cf5231329cd3a205b43e21fa72819c4012dd19093bf303f4acb6845144"} err="failed to get container status \"746d57cf5231329cd3a205b43e21fa72819c4012dd19093bf303f4acb6845144\": rpc error: code = NotFound desc = could not find container \"746d57cf5231329cd3a205b43e21fa72819c4012dd19093bf303f4acb6845144\": container with ID starting with 746d57cf5231329cd3a205b43e21fa72819c4012dd19093bf303f4acb6845144 not found: ID does not exist" Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.352554 4631 scope.go:117] "RemoveContainer" containerID="1f4e202f91c152fafe1f3d6a072fe5384da653e8742304ecbc24edef73f1933b" Dec 04 17:54:09 crc kubenswrapper[4631]: E1204 17:54:09.359853 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f4e202f91c152fafe1f3d6a072fe5384da653e8742304ecbc24edef73f1933b\": container with ID starting with 1f4e202f91c152fafe1f3d6a072fe5384da653e8742304ecbc24edef73f1933b not found: ID does not exist" containerID="1f4e202f91c152fafe1f3d6a072fe5384da653e8742304ecbc24edef73f1933b" Dec 04 17:54:09 crc kubenswrapper[4631]: I1204 17:54:09.359896 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f4e202f91c152fafe1f3d6a072fe5384da653e8742304ecbc24edef73f1933b"} err="failed to get container status \"1f4e202f91c152fafe1f3d6a072fe5384da653e8742304ecbc24edef73f1933b\": rpc error: code = NotFound desc = could not find container \"1f4e202f91c152fafe1f3d6a072fe5384da653e8742304ecbc24edef73f1933b\": container with ID starting with 1f4e202f91c152fafe1f3d6a072fe5384da653e8742304ecbc24edef73f1933b not found: ID does not exist" Dec 04 17:54:10 crc kubenswrapper[4631]: I1204 17:54:10.250728 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc96d3f1-aae9-4aac-bef8-c745f17a1ffb" path="/var/lib/kubelet/pods/bc96d3f1-aae9-4aac-bef8-c745f17a1ffb/volumes" Dec 04 17:54:10 crc kubenswrapper[4631]: I1204 17:54:10.281065 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4871a6a-2cea-402a-9dfe-e72887258bb5","Type":"ContainerStarted","Data":"05f43f30df8e79012696888fe9778218d282abfa10f83d6c19d2ea4ed95c59fb"} Dec 04 17:54:11 crc kubenswrapper[4631]: I1204 17:54:11.295348 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4871a6a-2cea-402a-9dfe-e72887258bb5","Type":"ContainerStarted","Data":"9dbe2530e10ac9a8e3f8d88778cc518e4a31b4b8d65e33ea9edddf94869f5da2"} Dec 04 17:54:11 crc kubenswrapper[4631]: I1204 17:54:11.297896 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 04 17:54:11 crc kubenswrapper[4631]: I1204 17:54:11.322481 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.216715857 podStartE2EDuration="7.322465932s" podCreationTimestamp="2025-12-04 17:54:04 +0000 UTC" firstStartedPulling="2025-12-04 17:54:05.367163128 +0000 UTC m=+1575.399405126" lastFinishedPulling="2025-12-04 17:54:10.472913203 +0000 UTC m=+1580.505155201" observedRunningTime="2025-12-04 17:54:11.315971566 +0000 UTC m=+1581.348213564" watchObservedRunningTime="2025-12-04 17:54:11.322465932 +0000 UTC m=+1581.354707920" Dec 04 17:54:14 crc kubenswrapper[4631]: I1204 17:54:14.240392 4631 scope.go:117] "RemoveContainer" 
containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" Dec 04 17:54:14 crc kubenswrapper[4631]: E1204 17:54:14.241092 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 17:54:14 crc kubenswrapper[4631]: I1204 17:54:14.326756 4631 generic.go:334] "Generic (PLEG): container finished" podID="10901c62-65f7-43ed-ab1e-93c30cb5b5f9" containerID="987727ba72f5119a2838995bf36f03f672f805ba31748431130dba383bbb0ef1" exitCode=0 Dec 04 17:54:14 crc kubenswrapper[4631]: I1204 17:54:14.326797 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-slhqd" event={"ID":"10901c62-65f7-43ed-ab1e-93c30cb5b5f9","Type":"ContainerDied","Data":"987727ba72f5119a2838995bf36f03f672f805ba31748431130dba383bbb0ef1"} Dec 04 17:54:14 crc kubenswrapper[4631]: I1204 17:54:14.692742 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 04 17:54:14 crc kubenswrapper[4631]: I1204 17:54:14.692791 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 04 17:54:15 crc kubenswrapper[4631]: I1204 17:54:15.709624 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ada8ef25-3d55-472a-aedd-ec8af72558ad" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.199:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 04 17:54:15 crc kubenswrapper[4631]: I1204 17:54:15.709694 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ada8ef25-3d55-472a-aedd-ec8af72558ad" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.199:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 04 17:54:15 crc kubenswrapper[4631]: I1204 17:54:15.720295 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-slhqd" Dec 04 17:54:15 crc kubenswrapper[4631]: I1204 17:54:15.877538 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-combined-ca-bundle\") pod \"10901c62-65f7-43ed-ab1e-93c30cb5b5f9\" (UID: \"10901c62-65f7-43ed-ab1e-93c30cb5b5f9\") " Dec 04 17:54:15 crc kubenswrapper[4631]: I1204 17:54:15.877665 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-scripts\") pod \"10901c62-65f7-43ed-ab1e-93c30cb5b5f9\" (UID: \"10901c62-65f7-43ed-ab1e-93c30cb5b5f9\") " Dec 04 17:54:15 crc kubenswrapper[4631]: I1204 17:54:15.877825 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ff4rg\" (UniqueName: \"kubernetes.io/projected/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-kube-api-access-ff4rg\") pod \"10901c62-65f7-43ed-ab1e-93c30cb5b5f9\" (UID: \"10901c62-65f7-43ed-ab1e-93c30cb5b5f9\") " Dec 04 17:54:15 crc kubenswrapper[4631]: I1204 17:54:15.877876 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-config-data\") pod \"10901c62-65f7-43ed-ab1e-93c30cb5b5f9\" (UID: \"10901c62-65f7-43ed-ab1e-93c30cb5b5f9\") " Dec 04 17:54:15 crc kubenswrapper[4631]: I1204 17:54:15.898515 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-scripts" (OuterVolumeSpecName: "scripts") pod "10901c62-65f7-43ed-ab1e-93c30cb5b5f9" (UID: "10901c62-65f7-43ed-ab1e-93c30cb5b5f9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:54:15 crc kubenswrapper[4631]: I1204 17:54:15.899535 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-kube-api-access-ff4rg" (OuterVolumeSpecName: "kube-api-access-ff4rg") pod "10901c62-65f7-43ed-ab1e-93c30cb5b5f9" (UID: "10901c62-65f7-43ed-ab1e-93c30cb5b5f9"). InnerVolumeSpecName "kube-api-access-ff4rg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:54:15 crc kubenswrapper[4631]: I1204 17:54:15.909706 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-config-data" (OuterVolumeSpecName: "config-data") pod "10901c62-65f7-43ed-ab1e-93c30cb5b5f9" (UID: "10901c62-65f7-43ed-ab1e-93c30cb5b5f9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:54:15 crc kubenswrapper[4631]: I1204 17:54:15.922455 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "10901c62-65f7-43ed-ab1e-93c30cb5b5f9" (UID: "10901c62-65f7-43ed-ab1e-93c30cb5b5f9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:54:15 crc kubenswrapper[4631]: I1204 17:54:15.980526 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ff4rg\" (UniqueName: \"kubernetes.io/projected/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-kube-api-access-ff4rg\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:15 crc kubenswrapper[4631]: I1204 17:54:15.980564 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:15 crc kubenswrapper[4631]: I1204 17:54:15.980576 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:15 crc kubenswrapper[4631]: I1204 17:54:15.980587 4631 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10901c62-65f7-43ed-ab1e-93c30cb5b5f9-scripts\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:16 crc kubenswrapper[4631]: I1204 17:54:16.346875 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-slhqd" event={"ID":"10901c62-65f7-43ed-ab1e-93c30cb5b5f9","Type":"ContainerDied","Data":"b10848cc4afd644c86cd024564c14e95f3ce3a1bc3b5ac2d1fba17cd04c719fb"} Dec 04 17:54:16 crc kubenswrapper[4631]: I1204 17:54:16.346923 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-slhqd" Dec 04 17:54:16 crc kubenswrapper[4631]: I1204 17:54:16.347209 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b10848cc4afd644c86cd024564c14e95f3ce3a1bc3b5ac2d1fba17cd04c719fb" Dec 04 17:54:16 crc kubenswrapper[4631]: I1204 17:54:16.528334 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 04 17:54:16 crc kubenswrapper[4631]: I1204 17:54:16.528617 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ada8ef25-3d55-472a-aedd-ec8af72558ad" containerName="nova-api-log" containerID="cri-o://6b1ef5d8b0d8d05d2cf7fb5fb279f4b6e2bf0fec0c0bfad15bfb0a78ba7d4a00" gracePeriod=30 Dec 04 17:54:16 crc kubenswrapper[4631]: I1204 17:54:16.528754 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ada8ef25-3d55-472a-aedd-ec8af72558ad" containerName="nova-api-api" containerID="cri-o://4423c875f1164156c01d4e39a715869b73180df4c717ca6bb719eb3bf5cffcd6" gracePeriod=30 Dec 04 17:54:16 crc kubenswrapper[4631]: I1204 17:54:16.565927 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 04 17:54:16 crc kubenswrapper[4631]: I1204 17:54:16.566160 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e" containerName="nova-scheduler-scheduler" containerID="cri-o://ed3a4215006d32029c189e7d0d23590233f9c8f12e197ab155e8019abbf3d828" gracePeriod=30 Dec 04 17:54:16 crc kubenswrapper[4631]: I1204 17:54:16.590838 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 17:54:16 crc kubenswrapper[4631]: I1204 17:54:16.591058 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="abf757d5-6165-4cb3-9e40-b0eec2920b02" 
containerName="nova-metadata-log" containerID="cri-o://86b19dcb6971fa3c48d1e270ff2cb133a09b5de1d1c819038d02434ff32618f2" gracePeriod=30 Dec 04 17:54:16 crc kubenswrapper[4631]: I1204 17:54:16.591185 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="abf757d5-6165-4cb3-9e40-b0eec2920b02" containerName="nova-metadata-metadata" containerID="cri-o://440108ea496c9ab660f6b98ceab053949b035d7b63ea3e20aaa516e2e5ae3c16" gracePeriod=30 Dec 04 17:54:17 crc kubenswrapper[4631]: E1204 17:54:17.257195 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ed3a4215006d32029c189e7d0d23590233f9c8f12e197ab155e8019abbf3d828" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 04 17:54:17 crc kubenswrapper[4631]: E1204 17:54:17.259849 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ed3a4215006d32029c189e7d0d23590233f9c8f12e197ab155e8019abbf3d828" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 04 17:54:17 crc kubenswrapper[4631]: E1204 17:54:17.264352 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ed3a4215006d32029c189e7d0d23590233f9c8f12e197ab155e8019abbf3d828" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 04 17:54:17 crc kubenswrapper[4631]: E1204 17:54:17.264431 4631 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e" containerName="nova-scheduler-scheduler" Dec 04 17:54:17 crc kubenswrapper[4631]: I1204 17:54:17.356953 4631 generic.go:334] "Generic (PLEG): container finished" podID="abf757d5-6165-4cb3-9e40-b0eec2920b02" containerID="86b19dcb6971fa3c48d1e270ff2cb133a09b5de1d1c819038d02434ff32618f2" exitCode=143 Dec 04 17:54:17 crc kubenswrapper[4631]: I1204 17:54:17.357030 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"abf757d5-6165-4cb3-9e40-b0eec2920b02","Type":"ContainerDied","Data":"86b19dcb6971fa3c48d1e270ff2cb133a09b5de1d1c819038d02434ff32618f2"} Dec 04 17:54:17 crc kubenswrapper[4631]: I1204 17:54:17.359285 4631 generic.go:334] "Generic (PLEG): container finished" podID="ada8ef25-3d55-472a-aedd-ec8af72558ad" containerID="6b1ef5d8b0d8d05d2cf7fb5fb279f4b6e2bf0fec0c0bfad15bfb0a78ba7d4a00" exitCode=143 Dec 04 17:54:17 crc kubenswrapper[4631]: I1204 17:54:17.359327 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ada8ef25-3d55-472a-aedd-ec8af72558ad","Type":"ContainerDied","Data":"6b1ef5d8b0d8d05d2cf7fb5fb279f4b6e2bf0fec0c0bfad15bfb0a78ba7d4a00"} Dec 04 17:54:19 crc kubenswrapper[4631]: I1204 17:54:19.995175 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="abf757d5-6165-4cb3-9e40-b0eec2920b02" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": read tcp 10.217.0.2:53432->10.217.0.193:8775: read: connection reset by peer" Dec 04 17:54:19 crc 
Dec 04 17:54:19 crc kubenswrapper[4631]: I1204 17:54:19.995224 4631 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="abf757d5-6165-4cb3-9e40-b0eec2920b02" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": read tcp 10.217.0.2:53426->10.217.0.193:8775: read: connection reset by peer"
Dec 04 17:54:20 crc kubenswrapper[4631]: I1204 17:54:20.389563 4631 generic.go:334] "Generic (PLEG): container finished" podID="abf757d5-6165-4cb3-9e40-b0eec2920b02" containerID="440108ea496c9ab660f6b98ceab053949b035d7b63ea3e20aaa516e2e5ae3c16" exitCode=0
Dec 04 17:54:20 crc kubenswrapper[4631]: I1204 17:54:20.390242 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"abf757d5-6165-4cb3-9e40-b0eec2920b02","Type":"ContainerDied","Data":"440108ea496c9ab660f6b98ceab053949b035d7b63ea3e20aaa516e2e5ae3c16"}
Dec 04 17:54:20 crc kubenswrapper[4631]: I1204 17:54:20.390350 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"abf757d5-6165-4cb3-9e40-b0eec2920b02","Type":"ContainerDied","Data":"32de8b6d3d4701ce651615620fb001aff9f511d9b0c742e6b2db82286f104fc8"}
Dec 04 17:54:20 crc kubenswrapper[4631]: I1204 17:54:20.390460 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="32de8b6d3d4701ce651615620fb001aff9f511d9b0c742e6b2db82286f104fc8"
Dec 04 17:54:20 crc kubenswrapper[4631]: I1204 17:54:20.449993 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 04 17:54:20 crc kubenswrapper[4631]: I1204 17:54:20.567489 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/abf757d5-6165-4cb3-9e40-b0eec2920b02-logs\") pod \"abf757d5-6165-4cb3-9e40-b0eec2920b02\" (UID: \"abf757d5-6165-4cb3-9e40-b0eec2920b02\") "
Dec 04 17:54:20 crc kubenswrapper[4631]: I1204 17:54:20.567623 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mf5tx\" (UniqueName: \"kubernetes.io/projected/abf757d5-6165-4cb3-9e40-b0eec2920b02-kube-api-access-mf5tx\") pod \"abf757d5-6165-4cb3-9e40-b0eec2920b02\" (UID: \"abf757d5-6165-4cb3-9e40-b0eec2920b02\") "
Dec 04 17:54:20 crc kubenswrapper[4631]: I1204 17:54:20.567657 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abf757d5-6165-4cb3-9e40-b0eec2920b02-combined-ca-bundle\") pod \"abf757d5-6165-4cb3-9e40-b0eec2920b02\" (UID: \"abf757d5-6165-4cb3-9e40-b0eec2920b02\") "
Dec 04 17:54:20 crc kubenswrapper[4631]: I1204 17:54:20.567696 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abf757d5-6165-4cb3-9e40-b0eec2920b02-config-data\") pod \"abf757d5-6165-4cb3-9e40-b0eec2920b02\" (UID: \"abf757d5-6165-4cb3-9e40-b0eec2920b02\") "
Dec 04 17:54:20 crc kubenswrapper[4631]: I1204 17:54:20.567723 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/abf757d5-6165-4cb3-9e40-b0eec2920b02-nova-metadata-tls-certs\") pod \"abf757d5-6165-4cb3-9e40-b0eec2920b02\" (UID: \"abf757d5-6165-4cb3-9e40-b0eec2920b02\") "
Dec 04 17:54:20 crc kubenswrapper[4631]: I1204 17:54:20.568901 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abf757d5-6165-4cb3-9e40-b0eec2920b02-logs" (OuterVolumeSpecName: "logs") pod "abf757d5-6165-4cb3-9e40-b0eec2920b02" (UID: "abf757d5-6165-4cb3-9e40-b0eec2920b02"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
"kubernetes.io/empty-dir/abf757d5-6165-4cb3-9e40-b0eec2920b02-logs" (OuterVolumeSpecName: "logs") pod "abf757d5-6165-4cb3-9e40-b0eec2920b02" (UID: "abf757d5-6165-4cb3-9e40-b0eec2920b02"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:54:20 crc kubenswrapper[4631]: I1204 17:54:20.573583 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abf757d5-6165-4cb3-9e40-b0eec2920b02-kube-api-access-mf5tx" (OuterVolumeSpecName: "kube-api-access-mf5tx") pod "abf757d5-6165-4cb3-9e40-b0eec2920b02" (UID: "abf757d5-6165-4cb3-9e40-b0eec2920b02"). InnerVolumeSpecName "kube-api-access-mf5tx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:54:20 crc kubenswrapper[4631]: I1204 17:54:20.606550 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abf757d5-6165-4cb3-9e40-b0eec2920b02-config-data" (OuterVolumeSpecName: "config-data") pod "abf757d5-6165-4cb3-9e40-b0eec2920b02" (UID: "abf757d5-6165-4cb3-9e40-b0eec2920b02"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:54:20 crc kubenswrapper[4631]: I1204 17:54:20.630763 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abf757d5-6165-4cb3-9e40-b0eec2920b02-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "abf757d5-6165-4cb3-9e40-b0eec2920b02" (UID: "abf757d5-6165-4cb3-9e40-b0eec2920b02"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:54:20 crc kubenswrapper[4631]: I1204 17:54:20.652488 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abf757d5-6165-4cb3-9e40-b0eec2920b02-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "abf757d5-6165-4cb3-9e40-b0eec2920b02" (UID: "abf757d5-6165-4cb3-9e40-b0eec2920b02"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:54:20 crc kubenswrapper[4631]: I1204 17:54:20.670494 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abf757d5-6165-4cb3-9e40-b0eec2920b02-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:20 crc kubenswrapper[4631]: I1204 17:54:20.670519 4631 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/abf757d5-6165-4cb3-9e40-b0eec2920b02-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:20 crc kubenswrapper[4631]: I1204 17:54:20.670532 4631 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/abf757d5-6165-4cb3-9e40-b0eec2920b02-logs\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:20 crc kubenswrapper[4631]: I1204 17:54:20.670540 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mf5tx\" (UniqueName: \"kubernetes.io/projected/abf757d5-6165-4cb3-9e40-b0eec2920b02-kube-api-access-mf5tx\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:20 crc kubenswrapper[4631]: I1204 17:54:20.670548 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abf757d5-6165-4cb3-9e40-b0eec2920b02-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.360626 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.409318 4631 generic.go:334] "Generic (PLEG): container finished" podID="aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e" containerID="ed3a4215006d32029c189e7d0d23590233f9c8f12e197ab155e8019abbf3d828" exitCode=0 Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.409378 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e","Type":"ContainerDied","Data":"ed3a4215006d32029c189e7d0d23590233f9c8f12e197ab155e8019abbf3d828"} Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.409447 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e","Type":"ContainerDied","Data":"274527604e7924f89593fe3aa8a1ea7dd9f9ba733a639a73463aae60b22983a4"} Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.409407 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.409473 4631 scope.go:117] "RemoveContainer" containerID="ed3a4215006d32029c189e7d0d23590233f9c8f12e197ab155e8019abbf3d828" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.412070 4631 generic.go:334] "Generic (PLEG): container finished" podID="ada8ef25-3d55-472a-aedd-ec8af72558ad" containerID="4423c875f1164156c01d4e39a715869b73180df4c717ca6bb719eb3bf5cffcd6" exitCode=0 Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.412134 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.413803 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ada8ef25-3d55-472a-aedd-ec8af72558ad","Type":"ContainerDied","Data":"4423c875f1164156c01d4e39a715869b73180df4c717ca6bb719eb3bf5cffcd6"} Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.477131 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.481044 4631 scope.go:117] "RemoveContainer" containerID="ed3a4215006d32029c189e7d0d23590233f9c8f12e197ab155e8019abbf3d828" Dec 04 17:54:21 crc kubenswrapper[4631]: E1204 17:54:21.481656 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed3a4215006d32029c189e7d0d23590233f9c8f12e197ab155e8019abbf3d828\": container with ID starting with ed3a4215006d32029c189e7d0d23590233f9c8f12e197ab155e8019abbf3d828 not found: ID does not exist" containerID="ed3a4215006d32029c189e7d0d23590233f9c8f12e197ab155e8019abbf3d828" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.485238 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e-combined-ca-bundle\") pod \"aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e\" (UID: \"aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e\") " Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.485350 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e-config-data\") pod \"aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e\" (UID: \"aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e\") " Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.485436 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjjtg\" (UniqueName: \"kubernetes.io/projected/aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e-kube-api-access-rjjtg\") pod \"aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e\" (UID: \"aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e\") " Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.489092 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed3a4215006d32029c189e7d0d23590233f9c8f12e197ab155e8019abbf3d828"} err="failed to get container status \"ed3a4215006d32029c189e7d0d23590233f9c8f12e197ab155e8019abbf3d828\": rpc error: code = NotFound desc = could not find container \"ed3a4215006d32029c189e7d0d23590233f9c8f12e197ab155e8019abbf3d828\": container with ID starting with ed3a4215006d32029c189e7d0d23590233f9c8f12e197ab155e8019abbf3d828 not found: ID does not exist" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.494138 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.497917 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e-kube-api-access-rjjtg" (OuterVolumeSpecName: "kube-api-access-rjjtg") pod "aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e" (UID: "aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e"). InnerVolumeSpecName "kube-api-access-rjjtg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.500993 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.538185 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e-config-data" (OuterVolumeSpecName: "config-data") pod "aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e" (UID: "aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.550528 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e" (UID: "aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.557529 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 04 17:54:21 crc kubenswrapper[4631]: E1204 17:54:21.558203 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abf757d5-6165-4cb3-9e40-b0eec2920b02" containerName="nova-metadata-metadata" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.558293 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="abf757d5-6165-4cb3-9e40-b0eec2920b02" containerName="nova-metadata-metadata" Dec 04 17:54:21 crc kubenswrapper[4631]: E1204 17:54:21.558415 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10901c62-65f7-43ed-ab1e-93c30cb5b5f9" containerName="nova-manage" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.558492 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="10901c62-65f7-43ed-ab1e-93c30cb5b5f9" containerName="nova-manage" Dec 04 17:54:21 crc kubenswrapper[4631]: E1204 17:54:21.558571 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc96d3f1-aae9-4aac-bef8-c745f17a1ffb" containerName="init" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.558656 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc96d3f1-aae9-4aac-bef8-c745f17a1ffb" containerName="init" Dec 04 17:54:21 crc kubenswrapper[4631]: E1204 17:54:21.558735 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e" containerName="nova-scheduler-scheduler" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.558820 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e" containerName="nova-scheduler-scheduler" Dec 04 17:54:21 crc kubenswrapper[4631]: E1204 17:54:21.558930 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abf757d5-6165-4cb3-9e40-b0eec2920b02" containerName="nova-metadata-log" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.559002 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="abf757d5-6165-4cb3-9e40-b0eec2920b02" containerName="nova-metadata-log" Dec 04 17:54:21 crc kubenswrapper[4631]: E1204 17:54:21.559072 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ada8ef25-3d55-472a-aedd-ec8af72558ad" containerName="nova-api-log" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.559147 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="ada8ef25-3d55-472a-aedd-ec8af72558ad" containerName="nova-api-log" Dec 04 17:54:21 crc kubenswrapper[4631]: E1204 17:54:21.559239 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc96d3f1-aae9-4aac-bef8-c745f17a1ffb" containerName="dnsmasq-dns" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.559310 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc96d3f1-aae9-4aac-bef8-c745f17a1ffb" containerName="dnsmasq-dns" Dec 04 17:54:21 crc kubenswrapper[4631]: E1204 17:54:21.559429 4631 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="ada8ef25-3d55-472a-aedd-ec8af72558ad" containerName="nova-api-api" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.559508 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="ada8ef25-3d55-472a-aedd-ec8af72558ad" containerName="nova-api-api" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.559811 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc96d3f1-aae9-4aac-bef8-c745f17a1ffb" containerName="dnsmasq-dns" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.559899 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="10901c62-65f7-43ed-ab1e-93c30cb5b5f9" containerName="nova-manage" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.559983 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="abf757d5-6165-4cb3-9e40-b0eec2920b02" containerName="nova-metadata-metadata" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.560069 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="abf757d5-6165-4cb3-9e40-b0eec2920b02" containerName="nova-metadata-log" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.560142 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e" containerName="nova-scheduler-scheduler" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.560225 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="ada8ef25-3d55-472a-aedd-ec8af72558ad" containerName="nova-api-log" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.560301 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="ada8ef25-3d55-472a-aedd-ec8af72558ad" containerName="nova-api-api" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.563794 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.569213 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.569251 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.572676 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.587774 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-internal-tls-certs\") pod \"ada8ef25-3d55-472a-aedd-ec8af72558ad\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.587859 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lgpp\" (UniqueName: \"kubernetes.io/projected/ada8ef25-3d55-472a-aedd-ec8af72558ad-kube-api-access-6lgpp\") pod \"ada8ef25-3d55-472a-aedd-ec8af72558ad\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.587890 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-config-data\") pod \"ada8ef25-3d55-472a-aedd-ec8af72558ad\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.595889 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-public-tls-certs\") pod \"ada8ef25-3d55-472a-aedd-ec8af72558ad\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.596001 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ada8ef25-3d55-472a-aedd-ec8af72558ad-logs\") pod \"ada8ef25-3d55-472a-aedd-ec8af72558ad\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.596043 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-combined-ca-bundle\") pod \"ada8ef25-3d55-472a-aedd-ec8af72558ad\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") " Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.596818 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjjtg\" (UniqueName: \"kubernetes.io/projected/aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e-kube-api-access-rjjtg\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.596835 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.596846 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.599217 
Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.601976 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ada8ef25-3d55-472a-aedd-ec8af72558ad-kube-api-access-6lgpp" (OuterVolumeSpecName: "kube-api-access-6lgpp") pod "ada8ef25-3d55-472a-aedd-ec8af72558ad" (UID: "ada8ef25-3d55-472a-aedd-ec8af72558ad"). InnerVolumeSpecName "kube-api-access-6lgpp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.615771 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-config-data" (OuterVolumeSpecName: "config-data") pod "ada8ef25-3d55-472a-aedd-ec8af72558ad" (UID: "ada8ef25-3d55-472a-aedd-ec8af72558ad"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.622123 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ada8ef25-3d55-472a-aedd-ec8af72558ad" (UID: "ada8ef25-3d55-472a-aedd-ec8af72558ad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:54:21 crc kubenswrapper[4631]: E1204 17:54:21.643671 4631 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-public-tls-certs podName:ada8ef25-3d55-472a-aedd-ec8af72558ad nodeName:}" failed. No retries permitted until 2025-12-04 17:54:22.143640541 +0000 UTC m=+1592.175882539 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-public-tls-certs") pod "ada8ef25-3d55-472a-aedd-ec8af72558ad" (UID: "ada8ef25-3d55-472a-aedd-ec8af72558ad") : error deleting /var/lib/kubelet/pods/ada8ef25-3d55-472a-aedd-ec8af72558ad/volume-subpaths: remove /var/lib/kubelet/pods/ada8ef25-3d55-472a-aedd-ec8af72558ad/volume-subpaths: no such file or directory
Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.645626 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ada8ef25-3d55-472a-aedd-ec8af72558ad" (UID: "ada8ef25-3d55-472a-aedd-ec8af72558ad"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.698505 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df12d5a5-6083-4b46-b6bb-8894eb4f421b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"df12d5a5-6083-4b46-b6bb-8894eb4f421b\") " pod="openstack/nova-metadata-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.698549 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df12d5a5-6083-4b46-b6bb-8894eb4f421b-config-data\") pod \"nova-metadata-0\" (UID: \"df12d5a5-6083-4b46-b6bb-8894eb4f421b\") " pod="openstack/nova-metadata-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.698590 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/df12d5a5-6083-4b46-b6bb-8894eb4f421b-logs\") pod \"nova-metadata-0\" (UID: \"df12d5a5-6083-4b46-b6bb-8894eb4f421b\") " pod="openstack/nova-metadata-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.698623 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlc25\" (UniqueName: \"kubernetes.io/projected/df12d5a5-6083-4b46-b6bb-8894eb4f421b-kube-api-access-hlc25\") pod \"nova-metadata-0\" (UID: \"df12d5a5-6083-4b46-b6bb-8894eb4f421b\") " pod="openstack/nova-metadata-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.698701 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/df12d5a5-6083-4b46-b6bb-8894eb4f421b-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"df12d5a5-6083-4b46-b6bb-8894eb4f421b\") " pod="openstack/nova-metadata-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.698755 4631 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ada8ef25-3d55-472a-aedd-ec8af72558ad-logs\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.698767 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.698777 4631 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.698786 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lgpp\" (UniqueName: \"kubernetes.io/projected/ada8ef25-3d55-472a-aedd-ec8af72558ad-kube-api-access-6lgpp\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.698795 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.746701 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.762012 4631 kubelet.go:2431] "SyncLoop 
REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.781299 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.783683 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.789510 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.790040 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.800500 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/df12d5a5-6083-4b46-b6bb-8894eb4f421b-logs\") pod \"nova-metadata-0\" (UID: \"df12d5a5-6083-4b46-b6bb-8894eb4f421b\") " pod="openstack/nova-metadata-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.800564 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlc25\" (UniqueName: \"kubernetes.io/projected/df12d5a5-6083-4b46-b6bb-8894eb4f421b-kube-api-access-hlc25\") pod \"nova-metadata-0\" (UID: \"df12d5a5-6083-4b46-b6bb-8894eb4f421b\") " pod="openstack/nova-metadata-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.800651 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/df12d5a5-6083-4b46-b6bb-8894eb4f421b-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"df12d5a5-6083-4b46-b6bb-8894eb4f421b\") " pod="openstack/nova-metadata-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.800692 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df12d5a5-6083-4b46-b6bb-8894eb4f421b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"df12d5a5-6083-4b46-b6bb-8894eb4f421b\") " pod="openstack/nova-metadata-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.800716 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df12d5a5-6083-4b46-b6bb-8894eb4f421b-config-data\") pod \"nova-metadata-0\" (UID: \"df12d5a5-6083-4b46-b6bb-8894eb4f421b\") " pod="openstack/nova-metadata-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.800971 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/df12d5a5-6083-4b46-b6bb-8894eb4f421b-logs\") pod \"nova-metadata-0\" (UID: \"df12d5a5-6083-4b46-b6bb-8894eb4f421b\") " pod="openstack/nova-metadata-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.805242 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/df12d5a5-6083-4b46-b6bb-8894eb4f421b-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"df12d5a5-6083-4b46-b6bb-8894eb4f421b\") " pod="openstack/nova-metadata-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.807128 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df12d5a5-6083-4b46-b6bb-8894eb4f421b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"df12d5a5-6083-4b46-b6bb-8894eb4f421b\") " 
pod="openstack/nova-metadata-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.808664 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df12d5a5-6083-4b46-b6bb-8894eb4f421b-config-data\") pod \"nova-metadata-0\" (UID: \"df12d5a5-6083-4b46-b6bb-8894eb4f421b\") " pod="openstack/nova-metadata-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.818484 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlc25\" (UniqueName: \"kubernetes.io/projected/df12d5a5-6083-4b46-b6bb-8894eb4f421b-kube-api-access-hlc25\") pod \"nova-metadata-0\" (UID: \"df12d5a5-6083-4b46-b6bb-8894eb4f421b\") " pod="openstack/nova-metadata-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.878402 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.902920 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77627c2f-d3c1-4699-9c42-8ab97657f312-config-data\") pod \"nova-scheduler-0\" (UID: \"77627c2f-d3c1-4699-9c42-8ab97657f312\") " pod="openstack/nova-scheduler-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.902975 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8xnv\" (UniqueName: \"kubernetes.io/projected/77627c2f-d3c1-4699-9c42-8ab97657f312-kube-api-access-p8xnv\") pod \"nova-scheduler-0\" (UID: \"77627c2f-d3c1-4699-9c42-8ab97657f312\") " pod="openstack/nova-scheduler-0" Dec 04 17:54:21 crc kubenswrapper[4631]: I1204 17:54:21.903327 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77627c2f-d3c1-4699-9c42-8ab97657f312-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"77627c2f-d3c1-4699-9c42-8ab97657f312\") " pod="openstack/nova-scheduler-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.006500 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77627c2f-d3c1-4699-9c42-8ab97657f312-config-data\") pod \"nova-scheduler-0\" (UID: \"77627c2f-d3c1-4699-9c42-8ab97657f312\") " pod="openstack/nova-scheduler-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.006897 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8xnv\" (UniqueName: \"kubernetes.io/projected/77627c2f-d3c1-4699-9c42-8ab97657f312-kube-api-access-p8xnv\") pod \"nova-scheduler-0\" (UID: \"77627c2f-d3c1-4699-9c42-8ab97657f312\") " pod="openstack/nova-scheduler-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.006989 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77627c2f-d3c1-4699-9c42-8ab97657f312-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"77627c2f-d3c1-4699-9c42-8ab97657f312\") " pod="openstack/nova-scheduler-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.010597 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77627c2f-d3c1-4699-9c42-8ab97657f312-config-data\") pod \"nova-scheduler-0\" (UID: \"77627c2f-d3c1-4699-9c42-8ab97657f312\") " pod="openstack/nova-scheduler-0" Dec 04 17:54:22 crc 
Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.013023 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77627c2f-d3c1-4699-9c42-8ab97657f312-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"77627c2f-d3c1-4699-9c42-8ab97657f312\") " pod="openstack/nova-scheduler-0"
Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.025738 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8xnv\" (UniqueName: \"kubernetes.io/projected/77627c2f-d3c1-4699-9c42-8ab97657f312-kube-api-access-p8xnv\") pod \"nova-scheduler-0\" (UID: \"77627c2f-d3c1-4699-9c42-8ab97657f312\") " pod="openstack/nova-scheduler-0"
Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.106312 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.208971 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-public-tls-certs\") pod \"ada8ef25-3d55-472a-aedd-ec8af72558ad\" (UID: \"ada8ef25-3d55-472a-aedd-ec8af72558ad\") "
Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.215479 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ada8ef25-3d55-472a-aedd-ec8af72558ad" (UID: "ada8ef25-3d55-472a-aedd-ec8af72558ad"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.287956 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e" path="/var/lib/kubelet/pods/aa46a07d-fe09-42eb-99fe-cbbfb51a3c2e/volumes"
Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.288758 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abf757d5-6165-4cb3-9e40-b0eec2920b02" path="/var/lib/kubelet/pods/abf757d5-6165-4cb3-9e40-b0eec2920b02/volumes"
Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.311485 4631 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ada8ef25-3d55-472a-aedd-ec8af72558ad-public-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 04 17:54:22 crc kubenswrapper[4631]: W1204 17:54:22.339917 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf12d5a5_6083_4b46_b6bb_8894eb4f421b.slice/crio-7ad1823b5c77433a68d1f441c0c1cd35f4f6179f4d2f8fd8383567094fe80a23 WatchSource:0}: Error finding container 7ad1823b5c77433a68d1f441c0c1cd35f4f6179f4d2f8fd8383567094fe80a23: Status 404 returned error can't find the container with id 7ad1823b5c77433a68d1f441c0c1cd35f4f6179f4d2f8fd8383567094fe80a23
Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.341233 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.432976 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"df12d5a5-6083-4b46-b6bb-8894eb4f421b","Type":"ContainerStarted","Data":"7ad1823b5c77433a68d1f441c0c1cd35f4f6179f4d2f8fd8383567094fe80a23"}
Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.438232 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ada8ef25-3d55-472a-aedd-ec8af72558ad","Type":"ContainerDied","Data":"a8db2dbfa9623068b630a772bc4dbad3172e9a01043a16ab0ce0673a9f8e77e1"}
for pod" pod="openstack/nova-api-0" event={"ID":"ada8ef25-3d55-472a-aedd-ec8af72558ad","Type":"ContainerDied","Data":"a8db2dbfa9623068b630a772bc4dbad3172e9a01043a16ab0ce0673a9f8e77e1"} Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.438283 4631 scope.go:117] "RemoveContainer" containerID="4423c875f1164156c01d4e39a715869b73180df4c717ca6bb719eb3bf5cffcd6" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.438362 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.474706 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.484993 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.508182 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.510031 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.516307 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.516601 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.516702 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.519532 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.558182 4631 scope.go:117] "RemoveContainer" containerID="6b1ef5d8b0d8d05d2cf7fb5fb279f4b6e2bf0fec0c0bfad15bfb0a78ba7d4a00" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.841039 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b96713c5-6fba-4ee6-9111-5aedf572a172-config-data\") pod \"nova-api-0\" (UID: \"b96713c5-6fba-4ee6-9111-5aedf572a172\") " pod="openstack/nova-api-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.841099 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b96713c5-6fba-4ee6-9111-5aedf572a172-public-tls-certs\") pod \"nova-api-0\" (UID: \"b96713c5-6fba-4ee6-9111-5aedf572a172\") " pod="openstack/nova-api-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.841120 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92g84\" (UniqueName: \"kubernetes.io/projected/b96713c5-6fba-4ee6-9111-5aedf572a172-kube-api-access-92g84\") pod \"nova-api-0\" (UID: \"b96713c5-6fba-4ee6-9111-5aedf572a172\") " pod="openstack/nova-api-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.841140 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b96713c5-6fba-4ee6-9111-5aedf572a172-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b96713c5-6fba-4ee6-9111-5aedf572a172\") " pod="openstack/nova-api-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.841178 4631 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b96713c5-6fba-4ee6-9111-5aedf572a172-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b96713c5-6fba-4ee6-9111-5aedf572a172\") " pod="openstack/nova-api-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.841201 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b96713c5-6fba-4ee6-9111-5aedf572a172-logs\") pod \"nova-api-0\" (UID: \"b96713c5-6fba-4ee6-9111-5aedf572a172\") " pod="openstack/nova-api-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.901738 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 04 17:54:22 crc kubenswrapper[4631]: W1204 17:54:22.904537 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod77627c2f_d3c1_4699_9c42_8ab97657f312.slice/crio-f7aa62d116414e6c890e7fd5c7934fea72c21f19c3a78322c6e5cc0c767b6f84 WatchSource:0}: Error finding container f7aa62d116414e6c890e7fd5c7934fea72c21f19c3a78322c6e5cc0c767b6f84: Status 404 returned error can't find the container with id f7aa62d116414e6c890e7fd5c7934fea72c21f19c3a78322c6e5cc0c767b6f84 Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.942309 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b96713c5-6fba-4ee6-9111-5aedf572a172-config-data\") pod \"nova-api-0\" (UID: \"b96713c5-6fba-4ee6-9111-5aedf572a172\") " pod="openstack/nova-api-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.942385 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b96713c5-6fba-4ee6-9111-5aedf572a172-public-tls-certs\") pod \"nova-api-0\" (UID: \"b96713c5-6fba-4ee6-9111-5aedf572a172\") " pod="openstack/nova-api-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.942404 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92g84\" (UniqueName: \"kubernetes.io/projected/b96713c5-6fba-4ee6-9111-5aedf572a172-kube-api-access-92g84\") pod \"nova-api-0\" (UID: \"b96713c5-6fba-4ee6-9111-5aedf572a172\") " pod="openstack/nova-api-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.942423 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b96713c5-6fba-4ee6-9111-5aedf572a172-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b96713c5-6fba-4ee6-9111-5aedf572a172\") " pod="openstack/nova-api-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.942477 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b96713c5-6fba-4ee6-9111-5aedf572a172-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b96713c5-6fba-4ee6-9111-5aedf572a172\") " pod="openstack/nova-api-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.942498 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b96713c5-6fba-4ee6-9111-5aedf572a172-logs\") pod \"nova-api-0\" (UID: \"b96713c5-6fba-4ee6-9111-5aedf572a172\") " pod="openstack/nova-api-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.943064 4631 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b96713c5-6fba-4ee6-9111-5aedf572a172-logs\") pod \"nova-api-0\" (UID: \"b96713c5-6fba-4ee6-9111-5aedf572a172\") " pod="openstack/nova-api-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.946272 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b96713c5-6fba-4ee6-9111-5aedf572a172-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b96713c5-6fba-4ee6-9111-5aedf572a172\") " pod="openstack/nova-api-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.946504 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b96713c5-6fba-4ee6-9111-5aedf572a172-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b96713c5-6fba-4ee6-9111-5aedf572a172\") " pod="openstack/nova-api-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.946553 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b96713c5-6fba-4ee6-9111-5aedf572a172-config-data\") pod \"nova-api-0\" (UID: \"b96713c5-6fba-4ee6-9111-5aedf572a172\") " pod="openstack/nova-api-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.947603 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b96713c5-6fba-4ee6-9111-5aedf572a172-public-tls-certs\") pod \"nova-api-0\" (UID: \"b96713c5-6fba-4ee6-9111-5aedf572a172\") " pod="openstack/nova-api-0" Dec 04 17:54:22 crc kubenswrapper[4631]: I1204 17:54:22.958200 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92g84\" (UniqueName: \"kubernetes.io/projected/b96713c5-6fba-4ee6-9111-5aedf572a172-kube-api-access-92g84\") pod \"nova-api-0\" (UID: \"b96713c5-6fba-4ee6-9111-5aedf572a172\") " pod="openstack/nova-api-0" Dec 04 17:54:23 crc kubenswrapper[4631]: I1204 17:54:23.171786 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 04 17:54:23 crc kubenswrapper[4631]: I1204 17:54:23.458565 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"df12d5a5-6083-4b46-b6bb-8894eb4f421b","Type":"ContainerStarted","Data":"918d191f5cc6edde8154c96a9931858c3cc7db2a98f2812e5ad36841f2218404"} Dec 04 17:54:23 crc kubenswrapper[4631]: I1204 17:54:23.458887 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"df12d5a5-6083-4b46-b6bb-8894eb4f421b","Type":"ContainerStarted","Data":"9fd5fbc65abe13fe962ffa452c69c7801e35690f67f01ddf0f63ed00bce4f77e"} Dec 04 17:54:23 crc kubenswrapper[4631]: I1204 17:54:23.465504 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"77627c2f-d3c1-4699-9c42-8ab97657f312","Type":"ContainerStarted","Data":"fa780fcab1804c14c9393a0c630f3ff93cd7e2bc5e3c9d43de76a4a034adf9e9"} Dec 04 17:54:23 crc kubenswrapper[4631]: I1204 17:54:23.465611 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"77627c2f-d3c1-4699-9c42-8ab97657f312","Type":"ContainerStarted","Data":"f7aa62d116414e6c890e7fd5c7934fea72c21f19c3a78322c6e5cc0c767b6f84"} Dec 04 17:54:23 crc kubenswrapper[4631]: I1204 17:54:23.484061 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.484039205 podStartE2EDuration="2.484039205s" podCreationTimestamp="2025-12-04 17:54:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:54:23.477746994 +0000 UTC m=+1593.509988992" watchObservedRunningTime="2025-12-04 17:54:23.484039205 +0000 UTC m=+1593.516281193" Dec 04 17:54:23 crc kubenswrapper[4631]: I1204 17:54:23.501029 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.501009906 podStartE2EDuration="2.501009906s" podCreationTimestamp="2025-12-04 17:54:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:54:23.50007486 +0000 UTC m=+1593.532316878" watchObservedRunningTime="2025-12-04 17:54:23.501009906 +0000 UTC m=+1593.533251924" Dec 04 17:54:23 crc kubenswrapper[4631]: W1204 17:54:23.645740 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb96713c5_6fba_4ee6_9111_5aedf572a172.slice/crio-3bfcd587e473af35e75f3efbedd693b89a0638e5002e5128b196cca1dfe1b2fa WatchSource:0}: Error finding container 3bfcd587e473af35e75f3efbedd693b89a0638e5002e5128b196cca1dfe1b2fa: Status 404 returned error can't find the container with id 3bfcd587e473af35e75f3efbedd693b89a0638e5002e5128b196cca1dfe1b2fa Dec 04 17:54:23 crc kubenswrapper[4631]: I1204 17:54:23.648842 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 04 17:54:24 crc kubenswrapper[4631]: I1204 17:54:24.260912 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ada8ef25-3d55-472a-aedd-ec8af72558ad" path="/var/lib/kubelet/pods/ada8ef25-3d55-472a-aedd-ec8af72558ad/volumes" Dec 04 17:54:24 crc kubenswrapper[4631]: I1204 17:54:24.476196 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"b96713c5-6fba-4ee6-9111-5aedf572a172","Type":"ContainerStarted","Data":"1ae5177bacb1f92d6392ebb482993533acd37a5cc30a750d8be65be873630f27"} Dec 04 17:54:24 crc kubenswrapper[4631]: I1204 17:54:24.476238 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b96713c5-6fba-4ee6-9111-5aedf572a172","Type":"ContainerStarted","Data":"2e1558392e4b25e836254ef8aacbf7ef990f453f6e6d07565450be31579a3b33"} Dec 04 17:54:24 crc kubenswrapper[4631]: I1204 17:54:24.476248 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b96713c5-6fba-4ee6-9111-5aedf572a172","Type":"ContainerStarted","Data":"3bfcd587e473af35e75f3efbedd693b89a0638e5002e5128b196cca1dfe1b2fa"} Dec 04 17:54:24 crc kubenswrapper[4631]: I1204 17:54:24.502417 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.502399766 podStartE2EDuration="2.502399766s" podCreationTimestamp="2025-12-04 17:54:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:54:24.495233161 +0000 UTC m=+1594.527475149" watchObservedRunningTime="2025-12-04 17:54:24.502399766 +0000 UTC m=+1594.534641764" Dec 04 17:54:26 crc kubenswrapper[4631]: I1204 17:54:26.879485 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 04 17:54:26 crc kubenswrapper[4631]: I1204 17:54:26.879807 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 04 17:54:27 crc kubenswrapper[4631]: I1204 17:54:27.107259 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 04 17:54:27 crc kubenswrapper[4631]: I1204 17:54:27.239157 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" Dec 04 17:54:27 crc kubenswrapper[4631]: E1204 17:54:27.239487 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 17:54:31 crc kubenswrapper[4631]: I1204 17:54:31.878947 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 04 17:54:31 crc kubenswrapper[4631]: I1204 17:54:31.879525 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 04 17:54:32 crc kubenswrapper[4631]: I1204 17:54:32.107019 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 04 17:54:32 crc kubenswrapper[4631]: I1204 17:54:32.136570 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 04 17:54:32 crc kubenswrapper[4631]: I1204 17:54:32.583655 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 04 17:54:32 crc kubenswrapper[4631]: I1204 17:54:32.892581 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="df12d5a5-6083-4b46-b6bb-8894eb4f421b" containerName="nova-metadata-log" 
probeResult="failure" output="Get \"https://10.217.0.202:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 04 17:54:32 crc kubenswrapper[4631]: I1204 17:54:32.892696 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="df12d5a5-6083-4b46-b6bb-8894eb4f421b" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.202:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 04 17:54:33 crc kubenswrapper[4631]: I1204 17:54:33.172017 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 04 17:54:33 crc kubenswrapper[4631]: I1204 17:54:33.172395 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 04 17:54:34 crc kubenswrapper[4631]: I1204 17:54:34.185571 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b96713c5-6fba-4ee6-9111-5aedf572a172" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 04 17:54:34 crc kubenswrapper[4631]: I1204 17:54:34.185588 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b96713c5-6fba-4ee6-9111-5aedf572a172" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 04 17:54:34 crc kubenswrapper[4631]: I1204 17:54:34.761423 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 04 17:54:34 crc kubenswrapper[4631]: I1204 17:54:34.990968 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-scmk8"] Dec 04 17:54:34 crc kubenswrapper[4631]: I1204 17:54:34.994263 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-scmk8" Dec 04 17:54:35 crc kubenswrapper[4631]: I1204 17:54:35.009661 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-scmk8"] Dec 04 17:54:35 crc kubenswrapper[4631]: I1204 17:54:35.087826 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61d9276e-715c-402f-99e1-0cc2d58ffec9-catalog-content\") pod \"redhat-marketplace-scmk8\" (UID: \"61d9276e-715c-402f-99e1-0cc2d58ffec9\") " pod="openshift-marketplace/redhat-marketplace-scmk8" Dec 04 17:54:35 crc kubenswrapper[4631]: I1204 17:54:35.087873 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tx8jx\" (UniqueName: \"kubernetes.io/projected/61d9276e-715c-402f-99e1-0cc2d58ffec9-kube-api-access-tx8jx\") pod \"redhat-marketplace-scmk8\" (UID: \"61d9276e-715c-402f-99e1-0cc2d58ffec9\") " pod="openshift-marketplace/redhat-marketplace-scmk8" Dec 04 17:54:35 crc kubenswrapper[4631]: I1204 17:54:35.087937 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61d9276e-715c-402f-99e1-0cc2d58ffec9-utilities\") pod \"redhat-marketplace-scmk8\" (UID: \"61d9276e-715c-402f-99e1-0cc2d58ffec9\") " pod="openshift-marketplace/redhat-marketplace-scmk8" Dec 04 17:54:35 crc kubenswrapper[4631]: I1204 17:54:35.189459 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61d9276e-715c-402f-99e1-0cc2d58ffec9-catalog-content\") pod \"redhat-marketplace-scmk8\" (UID: \"61d9276e-715c-402f-99e1-0cc2d58ffec9\") " pod="openshift-marketplace/redhat-marketplace-scmk8" Dec 04 17:54:35 crc kubenswrapper[4631]: I1204 17:54:35.191111 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tx8jx\" (UniqueName: \"kubernetes.io/projected/61d9276e-715c-402f-99e1-0cc2d58ffec9-kube-api-access-tx8jx\") pod \"redhat-marketplace-scmk8\" (UID: \"61d9276e-715c-402f-99e1-0cc2d58ffec9\") " pod="openshift-marketplace/redhat-marketplace-scmk8" Dec 04 17:54:35 crc kubenswrapper[4631]: I1204 17:54:35.190175 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61d9276e-715c-402f-99e1-0cc2d58ffec9-catalog-content\") pod \"redhat-marketplace-scmk8\" (UID: \"61d9276e-715c-402f-99e1-0cc2d58ffec9\") " pod="openshift-marketplace/redhat-marketplace-scmk8" Dec 04 17:54:35 crc kubenswrapper[4631]: I1204 17:54:35.191204 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61d9276e-715c-402f-99e1-0cc2d58ffec9-utilities\") pod \"redhat-marketplace-scmk8\" (UID: \"61d9276e-715c-402f-99e1-0cc2d58ffec9\") " pod="openshift-marketplace/redhat-marketplace-scmk8" Dec 04 17:54:35 crc kubenswrapper[4631]: I1204 17:54:35.191579 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61d9276e-715c-402f-99e1-0cc2d58ffec9-utilities\") pod \"redhat-marketplace-scmk8\" (UID: \"61d9276e-715c-402f-99e1-0cc2d58ffec9\") " pod="openshift-marketplace/redhat-marketplace-scmk8" Dec 04 17:54:35 crc kubenswrapper[4631]: I1204 17:54:35.216936 4631 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-tx8jx\" (UniqueName: \"kubernetes.io/projected/61d9276e-715c-402f-99e1-0cc2d58ffec9-kube-api-access-tx8jx\") pod \"redhat-marketplace-scmk8\" (UID: \"61d9276e-715c-402f-99e1-0cc2d58ffec9\") " pod="openshift-marketplace/redhat-marketplace-scmk8" Dec 04 17:54:35 crc kubenswrapper[4631]: I1204 17:54:35.317545 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-scmk8" Dec 04 17:54:35 crc kubenswrapper[4631]: I1204 17:54:35.830660 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-scmk8"] Dec 04 17:54:36 crc kubenswrapper[4631]: I1204 17:54:36.590107 4631 generic.go:334] "Generic (PLEG): container finished" podID="61d9276e-715c-402f-99e1-0cc2d58ffec9" containerID="3e8b91167e411191da244d92e9016c0a2ecccfb1f34ba8f4673db04e8732d9d6" exitCode=0 Dec 04 17:54:36 crc kubenswrapper[4631]: I1204 17:54:36.590158 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scmk8" event={"ID":"61d9276e-715c-402f-99e1-0cc2d58ffec9","Type":"ContainerDied","Data":"3e8b91167e411191da244d92e9016c0a2ecccfb1f34ba8f4673db04e8732d9d6"} Dec 04 17:54:36 crc kubenswrapper[4631]: I1204 17:54:36.590186 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scmk8" event={"ID":"61d9276e-715c-402f-99e1-0cc2d58ffec9","Type":"ContainerStarted","Data":"75919d3cf911f796423caba1fdfb61d07aa653f55c44d62ef3fa4cf3abfdcd94"} Dec 04 17:54:37 crc kubenswrapper[4631]: I1204 17:54:37.602707 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scmk8" event={"ID":"61d9276e-715c-402f-99e1-0cc2d58ffec9","Type":"ContainerStarted","Data":"96d335b74b42b846ef1f8d64fb83dfb802d7e254073202e5e00974843c73ead4"} Dec 04 17:54:38 crc kubenswrapper[4631]: I1204 17:54:38.240160 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" Dec 04 17:54:38 crc kubenswrapper[4631]: E1204 17:54:38.240747 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 17:54:38 crc kubenswrapper[4631]: I1204 17:54:38.613359 4631 generic.go:334] "Generic (PLEG): container finished" podID="61d9276e-715c-402f-99e1-0cc2d58ffec9" containerID="96d335b74b42b846ef1f8d64fb83dfb802d7e254073202e5e00974843c73ead4" exitCode=0 Dec 04 17:54:38 crc kubenswrapper[4631]: I1204 17:54:38.613422 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scmk8" event={"ID":"61d9276e-715c-402f-99e1-0cc2d58ffec9","Type":"ContainerDied","Data":"96d335b74b42b846ef1f8d64fb83dfb802d7e254073202e5e00974843c73ead4"} Dec 04 17:54:39 crc kubenswrapper[4631]: I1204 17:54:39.625039 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scmk8" event={"ID":"61d9276e-715c-402f-99e1-0cc2d58ffec9","Type":"ContainerStarted","Data":"4297e72946e8fcda32279e12faa6e41cc3580dd8f4925a0119e14d2390b5e2f6"} Dec 04 17:54:39 crc kubenswrapper[4631]: I1204 17:54:39.652896 4631 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openshift-marketplace/redhat-marketplace-scmk8" podStartSLOduration=3.231868874 podStartE2EDuration="5.652872527s" podCreationTimestamp="2025-12-04 17:54:34 +0000 UTC" firstStartedPulling="2025-12-04 17:54:36.591963615 +0000 UTC m=+1606.624205613" lastFinishedPulling="2025-12-04 17:54:39.012967268 +0000 UTC m=+1609.045209266" observedRunningTime="2025-12-04 17:54:39.645558018 +0000 UTC m=+1609.677800026" watchObservedRunningTime="2025-12-04 17:54:39.652872527 +0000 UTC m=+1609.685114545" Dec 04 17:54:41 crc kubenswrapper[4631]: I1204 17:54:41.885879 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 04 17:54:41 crc kubenswrapper[4631]: I1204 17:54:41.892478 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 04 17:54:41 crc kubenswrapper[4631]: I1204 17:54:41.893330 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 04 17:54:42 crc kubenswrapper[4631]: I1204 17:54:42.664989 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 04 17:54:43 crc kubenswrapper[4631]: I1204 17:54:43.182634 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 04 17:54:43 crc kubenswrapper[4631]: I1204 17:54:43.183120 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 04 17:54:43 crc kubenswrapper[4631]: I1204 17:54:43.183140 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 04 17:54:43 crc kubenswrapper[4631]: I1204 17:54:43.192789 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 04 17:54:43 crc kubenswrapper[4631]: I1204 17:54:43.665607 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 04 17:54:43 crc kubenswrapper[4631]: I1204 17:54:43.671245 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 04 17:54:43 crc kubenswrapper[4631]: I1204 17:54:43.993045 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dmbbs"] Dec 04 17:54:43 crc kubenswrapper[4631]: I1204 17:54:43.995605 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dmbbs" Dec 04 17:54:44 crc kubenswrapper[4631]: I1204 17:54:44.014816 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dmbbs"] Dec 04 17:54:44 crc kubenswrapper[4631]: I1204 17:54:44.167987 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp9rs\" (UniqueName: \"kubernetes.io/projected/249c0713-d835-4d5f-a14d-f0cc6371eac4-kube-api-access-tp9rs\") pod \"redhat-operators-dmbbs\" (UID: \"249c0713-d835-4d5f-a14d-f0cc6371eac4\") " pod="openshift-marketplace/redhat-operators-dmbbs" Dec 04 17:54:44 crc kubenswrapper[4631]: I1204 17:54:44.168045 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/249c0713-d835-4d5f-a14d-f0cc6371eac4-utilities\") pod \"redhat-operators-dmbbs\" (UID: \"249c0713-d835-4d5f-a14d-f0cc6371eac4\") " pod="openshift-marketplace/redhat-operators-dmbbs" Dec 04 17:54:44 crc kubenswrapper[4631]: I1204 17:54:44.168287 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/249c0713-d835-4d5f-a14d-f0cc6371eac4-catalog-content\") pod \"redhat-operators-dmbbs\" (UID: \"249c0713-d835-4d5f-a14d-f0cc6371eac4\") " pod="openshift-marketplace/redhat-operators-dmbbs" Dec 04 17:54:44 crc kubenswrapper[4631]: I1204 17:54:44.269714 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp9rs\" (UniqueName: \"kubernetes.io/projected/249c0713-d835-4d5f-a14d-f0cc6371eac4-kube-api-access-tp9rs\") pod \"redhat-operators-dmbbs\" (UID: \"249c0713-d835-4d5f-a14d-f0cc6371eac4\") " pod="openshift-marketplace/redhat-operators-dmbbs" Dec 04 17:54:44 crc kubenswrapper[4631]: I1204 17:54:44.269763 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/249c0713-d835-4d5f-a14d-f0cc6371eac4-utilities\") pod \"redhat-operators-dmbbs\" (UID: \"249c0713-d835-4d5f-a14d-f0cc6371eac4\") " pod="openshift-marketplace/redhat-operators-dmbbs" Dec 04 17:54:44 crc kubenswrapper[4631]: I1204 17:54:44.269809 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/249c0713-d835-4d5f-a14d-f0cc6371eac4-catalog-content\") pod \"redhat-operators-dmbbs\" (UID: \"249c0713-d835-4d5f-a14d-f0cc6371eac4\") " pod="openshift-marketplace/redhat-operators-dmbbs" Dec 04 17:54:44 crc kubenswrapper[4631]: I1204 17:54:44.270442 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/249c0713-d835-4d5f-a14d-f0cc6371eac4-catalog-content\") pod \"redhat-operators-dmbbs\" (UID: \"249c0713-d835-4d5f-a14d-f0cc6371eac4\") " pod="openshift-marketplace/redhat-operators-dmbbs" Dec 04 17:54:44 crc kubenswrapper[4631]: I1204 17:54:44.270527 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/249c0713-d835-4d5f-a14d-f0cc6371eac4-utilities\") pod \"redhat-operators-dmbbs\" (UID: \"249c0713-d835-4d5f-a14d-f0cc6371eac4\") " pod="openshift-marketplace/redhat-operators-dmbbs" Dec 04 17:54:44 crc kubenswrapper[4631]: I1204 17:54:44.289515 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-tp9rs\" (UniqueName: \"kubernetes.io/projected/249c0713-d835-4d5f-a14d-f0cc6371eac4-kube-api-access-tp9rs\") pod \"redhat-operators-dmbbs\" (UID: \"249c0713-d835-4d5f-a14d-f0cc6371eac4\") " pod="openshift-marketplace/redhat-operators-dmbbs" Dec 04 17:54:44 crc kubenswrapper[4631]: I1204 17:54:44.314279 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dmbbs" Dec 04 17:54:44 crc kubenswrapper[4631]: I1204 17:54:44.885943 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dmbbs"] Dec 04 17:54:45 crc kubenswrapper[4631]: I1204 17:54:45.317868 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-scmk8" Dec 04 17:54:45 crc kubenswrapper[4631]: I1204 17:54:45.317924 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-scmk8" Dec 04 17:54:45 crc kubenswrapper[4631]: I1204 17:54:45.682000 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dmbbs" event={"ID":"249c0713-d835-4d5f-a14d-f0cc6371eac4","Type":"ContainerStarted","Data":"ea86015849a54c83a0c7027bdbd6b531c91145f8b81726d4bdd8ae79abb9a40f"} Dec 04 17:54:45 crc kubenswrapper[4631]: I1204 17:54:45.928650 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-scmk8" Dec 04 17:54:46 crc kubenswrapper[4631]: I1204 17:54:46.063936 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-scmk8" Dec 04 17:54:46 crc kubenswrapper[4631]: I1204 17:54:46.692086 4631 generic.go:334] "Generic (PLEG): container finished" podID="249c0713-d835-4d5f-a14d-f0cc6371eac4" containerID="63ba5c58090e3b44170c85f90b7cd6757e29143e3c9029eed1627eef077b47f2" exitCode=0 Dec 04 17:54:46 crc kubenswrapper[4631]: I1204 17:54:46.692183 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dmbbs" event={"ID":"249c0713-d835-4d5f-a14d-f0cc6371eac4","Type":"ContainerDied","Data":"63ba5c58090e3b44170c85f90b7cd6757e29143e3c9029eed1627eef077b47f2"} Dec 04 17:54:47 crc kubenswrapper[4631]: I1204 17:54:47.979983 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-scmk8"] Dec 04 17:54:47 crc kubenswrapper[4631]: I1204 17:54:47.980487 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-scmk8" podUID="61d9276e-715c-402f-99e1-0cc2d58ffec9" containerName="registry-server" containerID="cri-o://4297e72946e8fcda32279e12faa6e41cc3580dd8f4925a0119e14d2390b5e2f6" gracePeriod=2 Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.664662 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-scmk8" Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.713956 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dmbbs" event={"ID":"249c0713-d835-4d5f-a14d-f0cc6371eac4","Type":"ContainerStarted","Data":"3de639a0f785eb9ded5666eef74465f2f5828fae563d691d295df9706af3643b"} Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.720387 4631 generic.go:334] "Generic (PLEG): container finished" podID="61d9276e-715c-402f-99e1-0cc2d58ffec9" containerID="4297e72946e8fcda32279e12faa6e41cc3580dd8f4925a0119e14d2390b5e2f6" exitCode=0 Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.720465 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scmk8" event={"ID":"61d9276e-715c-402f-99e1-0cc2d58ffec9","Type":"ContainerDied","Data":"4297e72946e8fcda32279e12faa6e41cc3580dd8f4925a0119e14d2390b5e2f6"} Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.720494 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scmk8" event={"ID":"61d9276e-715c-402f-99e1-0cc2d58ffec9","Type":"ContainerDied","Data":"75919d3cf911f796423caba1fdfb61d07aa653f55c44d62ef3fa4cf3abfdcd94"} Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.720513 4631 scope.go:117] "RemoveContainer" containerID="4297e72946e8fcda32279e12faa6e41cc3580dd8f4925a0119e14d2390b5e2f6" Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.720622 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-scmk8" Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.748284 4631 scope.go:117] "RemoveContainer" containerID="96d335b74b42b846ef1f8d64fb83dfb802d7e254073202e5e00974843c73ead4" Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.770221 4631 scope.go:117] "RemoveContainer" containerID="3e8b91167e411191da244d92e9016c0a2ecccfb1f34ba8f4673db04e8732d9d6" Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.773308 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61d9276e-715c-402f-99e1-0cc2d58ffec9-utilities\") pod \"61d9276e-715c-402f-99e1-0cc2d58ffec9\" (UID: \"61d9276e-715c-402f-99e1-0cc2d58ffec9\") " Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.773370 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61d9276e-715c-402f-99e1-0cc2d58ffec9-catalog-content\") pod \"61d9276e-715c-402f-99e1-0cc2d58ffec9\" (UID: \"61d9276e-715c-402f-99e1-0cc2d58ffec9\") " Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.773416 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tx8jx\" (UniqueName: \"kubernetes.io/projected/61d9276e-715c-402f-99e1-0cc2d58ffec9-kube-api-access-tx8jx\") pod \"61d9276e-715c-402f-99e1-0cc2d58ffec9\" (UID: \"61d9276e-715c-402f-99e1-0cc2d58ffec9\") " Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.775093 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61d9276e-715c-402f-99e1-0cc2d58ffec9-utilities" (OuterVolumeSpecName: "utilities") pod "61d9276e-715c-402f-99e1-0cc2d58ffec9" (UID: "61d9276e-715c-402f-99e1-0cc2d58ffec9"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.780754 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61d9276e-715c-402f-99e1-0cc2d58ffec9-kube-api-access-tx8jx" (OuterVolumeSpecName: "kube-api-access-tx8jx") pod "61d9276e-715c-402f-99e1-0cc2d58ffec9" (UID: "61d9276e-715c-402f-99e1-0cc2d58ffec9"). InnerVolumeSpecName "kube-api-access-tx8jx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.793226 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61d9276e-715c-402f-99e1-0cc2d58ffec9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "61d9276e-715c-402f-99e1-0cc2d58ffec9" (UID: "61d9276e-715c-402f-99e1-0cc2d58ffec9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.821396 4631 scope.go:117] "RemoveContainer" containerID="4297e72946e8fcda32279e12faa6e41cc3580dd8f4925a0119e14d2390b5e2f6" Dec 04 17:54:48 crc kubenswrapper[4631]: E1204 17:54:48.821837 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4297e72946e8fcda32279e12faa6e41cc3580dd8f4925a0119e14d2390b5e2f6\": container with ID starting with 4297e72946e8fcda32279e12faa6e41cc3580dd8f4925a0119e14d2390b5e2f6 not found: ID does not exist" containerID="4297e72946e8fcda32279e12faa6e41cc3580dd8f4925a0119e14d2390b5e2f6" Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.821873 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4297e72946e8fcda32279e12faa6e41cc3580dd8f4925a0119e14d2390b5e2f6"} err="failed to get container status \"4297e72946e8fcda32279e12faa6e41cc3580dd8f4925a0119e14d2390b5e2f6\": rpc error: code = NotFound desc = could not find container \"4297e72946e8fcda32279e12faa6e41cc3580dd8f4925a0119e14d2390b5e2f6\": container with ID starting with 4297e72946e8fcda32279e12faa6e41cc3580dd8f4925a0119e14d2390b5e2f6 not found: ID does not exist" Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.821902 4631 scope.go:117] "RemoveContainer" containerID="96d335b74b42b846ef1f8d64fb83dfb802d7e254073202e5e00974843c73ead4" Dec 04 17:54:48 crc kubenswrapper[4631]: E1204 17:54:48.822252 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96d335b74b42b846ef1f8d64fb83dfb802d7e254073202e5e00974843c73ead4\": container with ID starting with 96d335b74b42b846ef1f8d64fb83dfb802d7e254073202e5e00974843c73ead4 not found: ID does not exist" containerID="96d335b74b42b846ef1f8d64fb83dfb802d7e254073202e5e00974843c73ead4" Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.822281 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96d335b74b42b846ef1f8d64fb83dfb802d7e254073202e5e00974843c73ead4"} err="failed to get container status \"96d335b74b42b846ef1f8d64fb83dfb802d7e254073202e5e00974843c73ead4\": rpc error: code = NotFound desc = could not find container \"96d335b74b42b846ef1f8d64fb83dfb802d7e254073202e5e00974843c73ead4\": container with ID starting with 96d335b74b42b846ef1f8d64fb83dfb802d7e254073202e5e00974843c73ead4 not found: ID does not exist" Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.822299 4631 scope.go:117] "RemoveContainer" 
containerID="3e8b91167e411191da244d92e9016c0a2ecccfb1f34ba8f4673db04e8732d9d6" Dec 04 17:54:48 crc kubenswrapper[4631]: E1204 17:54:48.822666 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e8b91167e411191da244d92e9016c0a2ecccfb1f34ba8f4673db04e8732d9d6\": container with ID starting with 3e8b91167e411191da244d92e9016c0a2ecccfb1f34ba8f4673db04e8732d9d6 not found: ID does not exist" containerID="3e8b91167e411191da244d92e9016c0a2ecccfb1f34ba8f4673db04e8732d9d6" Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.822747 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e8b91167e411191da244d92e9016c0a2ecccfb1f34ba8f4673db04e8732d9d6"} err="failed to get container status \"3e8b91167e411191da244d92e9016c0a2ecccfb1f34ba8f4673db04e8732d9d6\": rpc error: code = NotFound desc = could not find container \"3e8b91167e411191da244d92e9016c0a2ecccfb1f34ba8f4673db04e8732d9d6\": container with ID starting with 3e8b91167e411191da244d92e9016c0a2ecccfb1f34ba8f4673db04e8732d9d6 not found: ID does not exist" Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.876590 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61d9276e-715c-402f-99e1-0cc2d58ffec9-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.876624 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61d9276e-715c-402f-99e1-0cc2d58ffec9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:48 crc kubenswrapper[4631]: I1204 17:54:48.876662 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tx8jx\" (UniqueName: \"kubernetes.io/projected/61d9276e-715c-402f-99e1-0cc2d58ffec9-kube-api-access-tx8jx\") on node \"crc\" DevicePath \"\"" Dec 04 17:54:49 crc kubenswrapper[4631]: I1204 17:54:49.054123 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-scmk8"] Dec 04 17:54:49 crc kubenswrapper[4631]: I1204 17:54:49.062511 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-scmk8"] Dec 04 17:54:49 crc kubenswrapper[4631]: I1204 17:54:49.240003 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" Dec 04 17:54:49 crc kubenswrapper[4631]: E1204 17:54:49.240269 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 17:54:50 crc kubenswrapper[4631]: I1204 17:54:50.252211 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61d9276e-715c-402f-99e1-0cc2d58ffec9" path="/var/lib/kubelet/pods/61d9276e-715c-402f-99e1-0cc2d58ffec9/volumes" Dec 04 17:54:54 crc kubenswrapper[4631]: I1204 17:54:54.471324 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 04 17:54:54 crc kubenswrapper[4631]: I1204 17:54:54.748656 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 04 17:54:54 crc 
kubenswrapper[4631]: I1204 17:54:54.790502 4631 generic.go:334] "Generic (PLEG): container finished" podID="249c0713-d835-4d5f-a14d-f0cc6371eac4" containerID="3de639a0f785eb9ded5666eef74465f2f5828fae563d691d295df9706af3643b" exitCode=0 Dec 04 17:54:54 crc kubenswrapper[4631]: I1204 17:54:54.790765 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dmbbs" event={"ID":"249c0713-d835-4d5f-a14d-f0cc6371eac4","Type":"ContainerDied","Data":"3de639a0f785eb9ded5666eef74465f2f5828fae563d691d295df9706af3643b"} Dec 04 17:54:56 crc kubenswrapper[4631]: I1204 17:54:56.809585 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dmbbs" event={"ID":"249c0713-d835-4d5f-a14d-f0cc6371eac4","Type":"ContainerStarted","Data":"96732f397ddce18c0e020861671ba2319fb4f6e4e5df8c306d1cdf3be643c462"} Dec 04 17:54:58 crc kubenswrapper[4631]: I1204 17:54:58.112925 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dmbbs" podStartSLOduration=5.394958045 podStartE2EDuration="15.112901298s" podCreationTimestamp="2025-12-04 17:54:43 +0000 UTC" firstStartedPulling="2025-12-04 17:54:46.693668126 +0000 UTC m=+1616.725910124" lastFinishedPulling="2025-12-04 17:54:56.411611379 +0000 UTC m=+1626.443853377" observedRunningTime="2025-12-04 17:54:58.100016067 +0000 UTC m=+1628.132258065" watchObservedRunningTime="2025-12-04 17:54:58.112901298 +0000 UTC m=+1628.145143296" Dec 04 17:54:59 crc kubenswrapper[4631]: I1204 17:54:59.773599 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="a71b38c4-ee61-49f2-8c8c-5adc05df2159" containerName="rabbitmq" containerID="cri-o://ba1fcee1b1cf54b06e0eded8657cebf7b66dc017f8434f1966c353c37f104084" gracePeriod=604795 Dec 04 17:55:00 crc kubenswrapper[4631]: I1204 17:55:00.161859 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="2ad48d12-6e35-428b-ac2e-ee6c2cf668ef" containerName="rabbitmq" containerID="cri-o://15727b25e94c33a02c5db07e68c341ee307de19364380c2d194b03965262fa5b" gracePeriod=604795 Dec 04 17:55:04 crc kubenswrapper[4631]: I1204 17:55:04.239639 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" Dec 04 17:55:04 crc kubenswrapper[4631]: E1204 17:55:04.240164 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 17:55:04 crc kubenswrapper[4631]: I1204 17:55:04.315131 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dmbbs" Dec 04 17:55:04 crc kubenswrapper[4631]: I1204 17:55:04.315204 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dmbbs" Dec 04 17:55:05 crc kubenswrapper[4631]: I1204 17:55:05.363772 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dmbbs" podUID="249c0713-d835-4d5f-a14d-f0cc6371eac4" containerName="registry-server" probeResult="failure" output=< Dec 04 17:55:05 crc 
kubenswrapper[4631]: timeout: failed to connect service ":50051" within 1s Dec 04 17:55:05 crc kubenswrapper[4631]: > Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.460979 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.557629 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a71b38c4-ee61-49f2-8c8c-5adc05df2159-server-conf\") pod \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.557731 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-confd\") pod \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.557772 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-erlang-cookie\") pod \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.557797 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-tls\") pod \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.557820 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-plugins\") pod \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.557843 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.557872 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a71b38c4-ee61-49f2-8c8c-5adc05df2159-erlang-cookie-secret\") pod \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.557909 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a71b38c4-ee61-49f2-8c8c-5adc05df2159-plugins-conf\") pod \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.557976 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjrwd\" (UniqueName: \"kubernetes.io/projected/a71b38c4-ee61-49f2-8c8c-5adc05df2159-kube-api-access-gjrwd\") pod \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.557997 4631 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a71b38c4-ee61-49f2-8c8c-5adc05df2159-config-data\") pod \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.558040 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a71b38c4-ee61-49f2-8c8c-5adc05df2159-pod-info\") pod \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\" (UID: \"a71b38c4-ee61-49f2-8c8c-5adc05df2159\") " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.559472 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "a71b38c4-ee61-49f2-8c8c-5adc05df2159" (UID: "a71b38c4-ee61-49f2-8c8c-5adc05df2159"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.562286 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "a71b38c4-ee61-49f2-8c8c-5adc05df2159" (UID: "a71b38c4-ee61-49f2-8c8c-5adc05df2159"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.563061 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a71b38c4-ee61-49f2-8c8c-5adc05df2159-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "a71b38c4-ee61-49f2-8c8c-5adc05df2159" (UID: "a71b38c4-ee61-49f2-8c8c-5adc05df2159"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.567135 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "a71b38c4-ee61-49f2-8c8c-5adc05df2159" (UID: "a71b38c4-ee61-49f2-8c8c-5adc05df2159"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.574026 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/a71b38c4-ee61-49f2-8c8c-5adc05df2159-pod-info" (OuterVolumeSpecName: "pod-info") pod "a71b38c4-ee61-49f2-8c8c-5adc05df2159" (UID: "a71b38c4-ee61-49f2-8c8c-5adc05df2159"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.599522 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a71b38c4-ee61-49f2-8c8c-5adc05df2159-kube-api-access-gjrwd" (OuterVolumeSpecName: "kube-api-access-gjrwd") pod "a71b38c4-ee61-49f2-8c8c-5adc05df2159" (UID: "a71b38c4-ee61-49f2-8c8c-5adc05df2159"). InnerVolumeSpecName "kube-api-access-gjrwd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.616855 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "a71b38c4-ee61-49f2-8c8c-5adc05df2159" (UID: "a71b38c4-ee61-49f2-8c8c-5adc05df2159"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.616979 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a71b38c4-ee61-49f2-8c8c-5adc05df2159-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "a71b38c4-ee61-49f2-8c8c-5adc05df2159" (UID: "a71b38c4-ee61-49f2-8c8c-5adc05df2159"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.660860 4631 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.660886 4631 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.660895 4631 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.660915 4631 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.660924 4631 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a71b38c4-ee61-49f2-8c8c-5adc05df2159-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.660932 4631 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a71b38c4-ee61-49f2-8c8c-5adc05df2159-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.660940 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjrwd\" (UniqueName: \"kubernetes.io/projected/a71b38c4-ee61-49f2-8c8c-5adc05df2159-kube-api-access-gjrwd\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.660949 4631 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a71b38c4-ee61-49f2-8c8c-5adc05df2159-pod-info\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.680256 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a71b38c4-ee61-49f2-8c8c-5adc05df2159-config-data" (OuterVolumeSpecName: "config-data") pod "a71b38c4-ee61-49f2-8c8c-5adc05df2159" (UID: "a71b38c4-ee61-49f2-8c8c-5adc05df2159"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.706626 4631 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.717246 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a71b38c4-ee61-49f2-8c8c-5adc05df2159-server-conf" (OuterVolumeSpecName: "server-conf") pod "a71b38c4-ee61-49f2-8c8c-5adc05df2159" (UID: "a71b38c4-ee61-49f2-8c8c-5adc05df2159"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.756143 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.765358 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a71b38c4-ee61-49f2-8c8c-5adc05df2159-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.765678 4631 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a71b38c4-ee61-49f2-8c8c-5adc05df2159-server-conf\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.765689 4631 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.809809 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "a71b38c4-ee61-49f2-8c8c-5adc05df2159" (UID: "a71b38c4-ee61-49f2-8c8c-5adc05df2159"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.867103 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-pod-info\") pod \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.867169 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-plugins\") pod \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.867229 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-plugins-conf\") pod \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.867259 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-server-conf\") pod \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.867292 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-config-data\") pod \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.867321 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-tls\") pod \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.867366 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-confd\") pod \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.867412 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rq66d\" (UniqueName: \"kubernetes.io/projected/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-kube-api-access-rq66d\") pod \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.867435 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-erlang-cookie-secret\") pod \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.867470 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-erlang-cookie\") pod 
\"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.867540 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\" (UID: \"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef\") " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.867937 4631 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a71b38c4-ee61-49f2-8c8c-5adc05df2159-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.873128 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "2ad48d12-6e35-428b-ac2e-ee6c2cf668ef" (UID: "2ad48d12-6e35-428b-ac2e-ee6c2cf668ef"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.874703 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "2ad48d12-6e35-428b-ac2e-ee6c2cf668ef" (UID: "2ad48d12-6e35-428b-ac2e-ee6c2cf668ef"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.877101 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-kube-api-access-rq66d" (OuterVolumeSpecName: "kube-api-access-rq66d") pod "2ad48d12-6e35-428b-ac2e-ee6c2cf668ef" (UID: "2ad48d12-6e35-428b-ac2e-ee6c2cf668ef"). InnerVolumeSpecName "kube-api-access-rq66d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.877556 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "2ad48d12-6e35-428b-ac2e-ee6c2cf668ef" (UID: "2ad48d12-6e35-428b-ac2e-ee6c2cf668ef"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.880091 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "2ad48d12-6e35-428b-ac2e-ee6c2cf668ef" (UID: "2ad48d12-6e35-428b-ac2e-ee6c2cf668ef"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.884254 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "persistence") pod "2ad48d12-6e35-428b-ac2e-ee6c2cf668ef" (UID: "2ad48d12-6e35-428b-ac2e-ee6c2cf668ef"). InnerVolumeSpecName "local-storage11-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.888099 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-pod-info" (OuterVolumeSpecName: "pod-info") pod "2ad48d12-6e35-428b-ac2e-ee6c2cf668ef" (UID: "2ad48d12-6e35-428b-ac2e-ee6c2cf668ef"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.906670 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "2ad48d12-6e35-428b-ac2e-ee6c2cf668ef" (UID: "2ad48d12-6e35-428b-ac2e-ee6c2cf668ef"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.935087 4631 generic.go:334] "Generic (PLEG): container finished" podID="2ad48d12-6e35-428b-ac2e-ee6c2cf668ef" containerID="15727b25e94c33a02c5db07e68c341ee307de19364380c2d194b03965262fa5b" exitCode=0 Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.935144 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef","Type":"ContainerDied","Data":"15727b25e94c33a02c5db07e68c341ee307de19364380c2d194b03965262fa5b"} Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.935170 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"2ad48d12-6e35-428b-ac2e-ee6c2cf668ef","Type":"ContainerDied","Data":"198eb62e4f98d1676b218676effd75697bf876b08d34c56bbf9c4a8293467834"} Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.935186 4631 scope.go:117] "RemoveContainer" containerID="15727b25e94c33a02c5db07e68c341ee307de19364380c2d194b03965262fa5b" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.935293 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.961086 4631 generic.go:334] "Generic (PLEG): container finished" podID="a71b38c4-ee61-49f2-8c8c-5adc05df2159" containerID="ba1fcee1b1cf54b06e0eded8657cebf7b66dc017f8434f1966c353c37f104084" exitCode=0 Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.961123 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a71b38c4-ee61-49f2-8c8c-5adc05df2159","Type":"ContainerDied","Data":"ba1fcee1b1cf54b06e0eded8657cebf7b66dc017f8434f1966c353c37f104084"} Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.961147 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a71b38c4-ee61-49f2-8c8c-5adc05df2159","Type":"ContainerDied","Data":"75cad5d58e51c5c945c6d86a2c7840ab5d7b0a339cb9675861a9df32f488a608"} Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.961202 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.966768 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-config-data" (OuterVolumeSpecName: "config-data") pod "2ad48d12-6e35-428b-ac2e-ee6c2cf668ef" (UID: "2ad48d12-6e35-428b-ac2e-ee6c2cf668ef"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.970202 4631 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.970355 4631 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-pod-info\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.970479 4631 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.970496 4631 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.970538 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.970550 4631 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.970597 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rq66d\" (UniqueName: \"kubernetes.io/projected/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-kube-api-access-rq66d\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.970608 4631 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.970658 4631 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:06 crc kubenswrapper[4631]: I1204 17:55:06.979210 4631 scope.go:117] "RemoveContainer" containerID="e833e3040ea35a329a540f414bd27255b574d3b651ea3a7efba64fa3fed817a2" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.032257 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-server-conf" (OuterVolumeSpecName: "server-conf") pod "2ad48d12-6e35-428b-ac2e-ee6c2cf668ef" (UID: "2ad48d12-6e35-428b-ac2e-ee6c2cf668ef"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.046304 4631 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.073517 4631 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-server-conf\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.073540 4631 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.105119 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "2ad48d12-6e35-428b-ac2e-ee6c2cf668ef" (UID: "2ad48d12-6e35-428b-ac2e-ee6c2cf668ef"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.174903 4631 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.195183 4631 scope.go:117] "RemoveContainer" containerID="15727b25e94c33a02c5db07e68c341ee307de19364380c2d194b03965262fa5b" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.200431 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 04 17:55:07 crc kubenswrapper[4631]: E1204 17:55:07.204986 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15727b25e94c33a02c5db07e68c341ee307de19364380c2d194b03965262fa5b\": container with ID starting with 15727b25e94c33a02c5db07e68c341ee307de19364380c2d194b03965262fa5b not found: ID does not exist" containerID="15727b25e94c33a02c5db07e68c341ee307de19364380c2d194b03965262fa5b" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.205023 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15727b25e94c33a02c5db07e68c341ee307de19364380c2d194b03965262fa5b"} err="failed to get container status \"15727b25e94c33a02c5db07e68c341ee307de19364380c2d194b03965262fa5b\": rpc error: code = NotFound desc = could not find container \"15727b25e94c33a02c5db07e68c341ee307de19364380c2d194b03965262fa5b\": container with ID starting with 15727b25e94c33a02c5db07e68c341ee307de19364380c2d194b03965262fa5b not found: ID does not exist" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.205049 4631 scope.go:117] "RemoveContainer" containerID="e833e3040ea35a329a540f414bd27255b574d3b651ea3a7efba64fa3fed817a2" Dec 04 17:55:07 crc kubenswrapper[4631]: E1204 17:55:07.214022 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e833e3040ea35a329a540f414bd27255b574d3b651ea3a7efba64fa3fed817a2\": container with ID starting with e833e3040ea35a329a540f414bd27255b574d3b651ea3a7efba64fa3fed817a2 not found: ID does not exist" containerID="e833e3040ea35a329a540f414bd27255b574d3b651ea3a7efba64fa3fed817a2" Dec 04 
17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.214061 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e833e3040ea35a329a540f414bd27255b574d3b651ea3a7efba64fa3fed817a2"} err="failed to get container status \"e833e3040ea35a329a540f414bd27255b574d3b651ea3a7efba64fa3fed817a2\": rpc error: code = NotFound desc = could not find container \"e833e3040ea35a329a540f414bd27255b574d3b651ea3a7efba64fa3fed817a2\": container with ID starting with e833e3040ea35a329a540f414bd27255b574d3b651ea3a7efba64fa3fed817a2 not found: ID does not exist" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.214087 4631 scope.go:117] "RemoveContainer" containerID="ba1fcee1b1cf54b06e0eded8657cebf7b66dc017f8434f1966c353c37f104084" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.249039 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.256513 4631 scope.go:117] "RemoveContainer" containerID="f3df45d8c13bc0c4ea4338ca7db675f52fb4e8786e9eaea1853623a900588c8c" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.264849 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 04 17:55:07 crc kubenswrapper[4631]: E1204 17:55:07.265439 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a71b38c4-ee61-49f2-8c8c-5adc05df2159" containerName="rabbitmq" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.265565 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a71b38c4-ee61-49f2-8c8c-5adc05df2159" containerName="rabbitmq" Dec 04 17:55:07 crc kubenswrapper[4631]: E1204 17:55:07.265671 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61d9276e-715c-402f-99e1-0cc2d58ffec9" containerName="extract-content" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.265748 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="61d9276e-715c-402f-99e1-0cc2d58ffec9" containerName="extract-content" Dec 04 17:55:07 crc kubenswrapper[4631]: E1204 17:55:07.265873 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61d9276e-715c-402f-99e1-0cc2d58ffec9" containerName="registry-server" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.265976 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="61d9276e-715c-402f-99e1-0cc2d58ffec9" containerName="registry-server" Dec 04 17:55:07 crc kubenswrapper[4631]: E1204 17:55:07.266068 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61d9276e-715c-402f-99e1-0cc2d58ffec9" containerName="extract-utilities" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.266147 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="61d9276e-715c-402f-99e1-0cc2d58ffec9" containerName="extract-utilities" Dec 04 17:55:07 crc kubenswrapper[4631]: E1204 17:55:07.266232 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ad48d12-6e35-428b-ac2e-ee6c2cf668ef" containerName="setup-container" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.266303 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ad48d12-6e35-428b-ac2e-ee6c2cf668ef" containerName="setup-container" Dec 04 17:55:07 crc kubenswrapper[4631]: E1204 17:55:07.266389 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a71b38c4-ee61-49f2-8c8c-5adc05df2159" containerName="setup-container" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.266470 4631 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="a71b38c4-ee61-49f2-8c8c-5adc05df2159" containerName="setup-container" Dec 04 17:55:07 crc kubenswrapper[4631]: E1204 17:55:07.266567 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ad48d12-6e35-428b-ac2e-ee6c2cf668ef" containerName="rabbitmq" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.266664 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ad48d12-6e35-428b-ac2e-ee6c2cf668ef" containerName="rabbitmq" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.267004 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ad48d12-6e35-428b-ac2e-ee6c2cf668ef" containerName="rabbitmq" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.267121 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="61d9276e-715c-402f-99e1-0cc2d58ffec9" containerName="registry-server" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.267230 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="a71b38c4-ee61-49f2-8c8c-5adc05df2159" containerName="rabbitmq" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.268480 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.293124 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.297434 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.297606 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.297707 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.297836 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.297643 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.298019 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.300567 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-hdwpp" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.341498 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.349312 4631 scope.go:117] "RemoveContainer" containerID="ba1fcee1b1cf54b06e0eded8657cebf7b66dc017f8434f1966c353c37f104084" Dec 04 17:55:07 crc kubenswrapper[4631]: E1204 17:55:07.349677 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba1fcee1b1cf54b06e0eded8657cebf7b66dc017f8434f1966c353c37f104084\": container with ID starting with ba1fcee1b1cf54b06e0eded8657cebf7b66dc017f8434f1966c353c37f104084 not found: ID does not exist" containerID="ba1fcee1b1cf54b06e0eded8657cebf7b66dc017f8434f1966c353c37f104084" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.349709 4631 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"ba1fcee1b1cf54b06e0eded8657cebf7b66dc017f8434f1966c353c37f104084"} err="failed to get container status \"ba1fcee1b1cf54b06e0eded8657cebf7b66dc017f8434f1966c353c37f104084\": rpc error: code = NotFound desc = could not find container \"ba1fcee1b1cf54b06e0eded8657cebf7b66dc017f8434f1966c353c37f104084\": container with ID starting with ba1fcee1b1cf54b06e0eded8657cebf7b66dc017f8434f1966c353c37f104084 not found: ID does not exist" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.349734 4631 scope.go:117] "RemoveContainer" containerID="f3df45d8c13bc0c4ea4338ca7db675f52fb4e8786e9eaea1853623a900588c8c" Dec 04 17:55:07 crc kubenswrapper[4631]: E1204 17:55:07.350672 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3df45d8c13bc0c4ea4338ca7db675f52fb4e8786e9eaea1853623a900588c8c\": container with ID starting with f3df45d8c13bc0c4ea4338ca7db675f52fb4e8786e9eaea1853623a900588c8c not found: ID does not exist" containerID="f3df45d8c13bc0c4ea4338ca7db675f52fb4e8786e9eaea1853623a900588c8c" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.350709 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3df45d8c13bc0c4ea4338ca7db675f52fb4e8786e9eaea1853623a900588c8c"} err="failed to get container status \"f3df45d8c13bc0c4ea4338ca7db675f52fb4e8786e9eaea1853623a900588c8c\": rpc error: code = NotFound desc = could not find container \"f3df45d8c13bc0c4ea4338ca7db675f52fb4e8786e9eaea1853623a900588c8c\": container with ID starting with f3df45d8c13bc0c4ea4338ca7db675f52fb4e8786e9eaea1853623a900588c8c not found: ID does not exist" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.379242 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1ba76133-7ea9-4b93-abdd-426b64c09c9d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.379579 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1ba76133-7ea9-4b93-abdd-426b64c09c9d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.379763 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1ba76133-7ea9-4b93-abdd-426b64c09c9d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.379865 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1ba76133-7ea9-4b93-abdd-426b64c09c9d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.379971 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/1ba76133-7ea9-4b93-abdd-426b64c09c9d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.380052 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1ba76133-7ea9-4b93-abdd-426b64c09c9d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.380152 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1ba76133-7ea9-4b93-abdd-426b64c09c9d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.380233 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1ba76133-7ea9-4b93-abdd-426b64c09c9d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.380327 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.380446 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1ba76133-7ea9-4b93-abdd-426b64c09c9d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.380531 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnfbd\" (UniqueName: \"kubernetes.io/projected/1ba76133-7ea9-4b93-abdd-426b64c09c9d-kube-api-access-mnfbd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.407028 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.466534 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.468270 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.475034 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.475889 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.478039 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.484845 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1ba76133-7ea9-4b93-abdd-426b64c09c9d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.484911 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1ba76133-7ea9-4b93-abdd-426b64c09c9d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.484936 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1ba76133-7ea9-4b93-abdd-426b64c09c9d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.484980 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1ba76133-7ea9-4b93-abdd-426b64c09c9d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.485001 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1ba76133-7ea9-4b93-abdd-426b64c09c9d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.485034 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.485078 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1ba76133-7ea9-4b93-abdd-426b64c09c9d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.485093 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnfbd\" (UniqueName: \"kubernetes.io/projected/1ba76133-7ea9-4b93-abdd-426b64c09c9d-kube-api-access-mnfbd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 
17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.485115 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1ba76133-7ea9-4b93-abdd-426b64c09c9d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.485138 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1ba76133-7ea9-4b93-abdd-426b64c09c9d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.485153 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1ba76133-7ea9-4b93-abdd-426b64c09c9d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.486514 4631 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.487043 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1ba76133-7ea9-4b93-abdd-426b64c09c9d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.488895 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.489113 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.489224 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.489335 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.489482 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-rtjh5" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.489921 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1ba76133-7ea9-4b93-abdd-426b64c09c9d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.490914 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1ba76133-7ea9-4b93-abdd-426b64c09c9d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.491088 
4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1ba76133-7ea9-4b93-abdd-426b64c09c9d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.491527 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1ba76133-7ea9-4b93-abdd-426b64c09c9d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.491581 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1ba76133-7ea9-4b93-abdd-426b64c09c9d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.491761 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1ba76133-7ea9-4b93-abdd-426b64c09c9d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.492999 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1ba76133-7ea9-4b93-abdd-426b64c09c9d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.493343 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1ba76133-7ea9-4b93-abdd-426b64c09c9d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.515089 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnfbd\" (UniqueName: \"kubernetes.io/projected/1ba76133-7ea9-4b93-abdd-426b64c09c9d-kube-api-access-mnfbd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.533474 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1ba76133-7ea9-4b93-abdd-426b64c09c9d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.586675 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9ef0c479-0169-423e-9619-fbf9f7e63a97-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.586723 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/9ef0c479-0169-423e-9619-fbf9f7e63a97-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.586764 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9ef0c479-0169-423e-9619-fbf9f7e63a97-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.586802 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.586830 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9ef0c479-0169-423e-9619-fbf9f7e63a97-config-data\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.586854 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9ef0c479-0169-423e-9619-fbf9f7e63a97-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.586891 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gz4q9\" (UniqueName: \"kubernetes.io/projected/9ef0c479-0169-423e-9619-fbf9f7e63a97-kube-api-access-gz4q9\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.586922 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9ef0c479-0169-423e-9619-fbf9f7e63a97-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.586961 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9ef0c479-0169-423e-9619-fbf9f7e63a97-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.587000 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9ef0c479-0169-423e-9619-fbf9f7e63a97-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.587021 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9ef0c479-0169-423e-9619-fbf9f7e63a97-server-conf\") pod 
\"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.621554 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.689075 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9ef0c479-0169-423e-9619-fbf9f7e63a97-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.689646 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9ef0c479-0169-423e-9619-fbf9f7e63a97-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.689817 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9ef0c479-0169-423e-9619-fbf9f7e63a97-pod-info\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.689917 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9ef0c479-0169-423e-9619-fbf9f7e63a97-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.690035 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9ef0c479-0169-423e-9619-fbf9f7e63a97-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.690128 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9ef0c479-0169-423e-9619-fbf9f7e63a97-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.690233 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9ef0c479-0169-423e-9619-fbf9f7e63a97-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.690341 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.690458 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9ef0c479-0169-423e-9619-fbf9f7e63a97-config-data\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 
17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.690548 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9ef0c479-0169-423e-9619-fbf9f7e63a97-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.690673 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gz4q9\" (UniqueName: \"kubernetes.io/projected/9ef0c479-0169-423e-9619-fbf9f7e63a97-kube-api-access-gz4q9\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.690833 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9ef0c479-0169-423e-9619-fbf9f7e63a97-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.691044 4631 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.691189 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9ef0c479-0169-423e-9619-fbf9f7e63a97-server-conf\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.691720 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9ef0c479-0169-423e-9619-fbf9f7e63a97-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.691829 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9ef0c479-0169-423e-9619-fbf9f7e63a97-config-data\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.692303 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9ef0c479-0169-423e-9619-fbf9f7e63a97-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.692788 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9ef0c479-0169-423e-9619-fbf9f7e63a97-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.693951 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9ef0c479-0169-423e-9619-fbf9f7e63a97-pod-info\") pod 
\"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.695409 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9ef0c479-0169-423e-9619-fbf9f7e63a97-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.696063 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9ef0c479-0169-423e-9619-fbf9f7e63a97-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.710087 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gz4q9\" (UniqueName: \"kubernetes.io/projected/9ef0c479-0169-423e-9619-fbf9f7e63a97-kube-api-access-gz4q9\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.768664 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"9ef0c479-0169-423e-9619-fbf9f7e63a97\") " pod="openstack/rabbitmq-server-0" Dec 04 17:55:07 crc kubenswrapper[4631]: I1204 17:55:07.792714 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 04 17:55:08 crc kubenswrapper[4631]: I1204 17:55:08.104187 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 04 17:55:08 crc kubenswrapper[4631]: I1204 17:55:08.256499 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ad48d12-6e35-428b-ac2e-ee6c2cf668ef" path="/var/lib/kubelet/pods/2ad48d12-6e35-428b-ac2e-ee6c2cf668ef/volumes" Dec 04 17:55:08 crc kubenswrapper[4631]: I1204 17:55:08.257526 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a71b38c4-ee61-49f2-8c8c-5adc05df2159" path="/var/lib/kubelet/pods/a71b38c4-ee61-49f2-8c8c-5adc05df2159/volumes" Dec 04 17:55:08 crc kubenswrapper[4631]: I1204 17:55:08.362230 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.014147 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-xq6rz"] Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.016261 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.031707 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.044426 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1ba76133-7ea9-4b93-abdd-426b64c09c9d","Type":"ContainerStarted","Data":"98a5300c04641bac73ed87d9b8a5938b28508daff6370921479665901e13a317"} Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.049167 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-xq6rz"] Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.055270 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9ef0c479-0169-423e-9619-fbf9f7e63a97","Type":"ContainerStarted","Data":"a69ab43a6dcdbccd1fc26c9575bfb73511ddeb3526c2f279d69bc2b358cc1bb5"} Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.130516 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-xq6rz\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") " pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.130618 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-xq6rz\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") " pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.130678 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-config\") pod \"dnsmasq-dns-67b789f86c-xq6rz\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") " pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.130768 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfk8r\" (UniqueName: \"kubernetes.io/projected/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-kube-api-access-tfk8r\") pod \"dnsmasq-dns-67b789f86c-xq6rz\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") " pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.130847 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-xq6rz\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") " pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.130894 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-dns-svc\") pod \"dnsmasq-dns-67b789f86c-xq6rz\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") " pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.130964 4631 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-xq6rz\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") " pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.232781 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-xq6rz\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") " pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.232879 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-config\") pod \"dnsmasq-dns-67b789f86c-xq6rz\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") " pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.232957 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfk8r\" (UniqueName: \"kubernetes.io/projected/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-kube-api-access-tfk8r\") pod \"dnsmasq-dns-67b789f86c-xq6rz\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") " pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.233012 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-xq6rz\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") " pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.233033 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-dns-svc\") pod \"dnsmasq-dns-67b789f86c-xq6rz\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") " pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.233078 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-xq6rz\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") " pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.233098 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-xq6rz\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") " pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.233957 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-xq6rz\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") " pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.233984 4631 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-dns-svc\") pod \"dnsmasq-dns-67b789f86c-xq6rz\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") " pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.234057 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-xq6rz\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") " pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.234122 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-xq6rz\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") " pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.234222 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-xq6rz\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") " pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.234641 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-config\") pod \"dnsmasq-dns-67b789f86c-xq6rz\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") " pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.269457 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfk8r\" (UniqueName: \"kubernetes.io/projected/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-kube-api-access-tfk8r\") pod \"dnsmasq-dns-67b789f86c-xq6rz\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") " pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.374856 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:09 crc kubenswrapper[4631]: I1204 17:55:09.956678 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-xq6rz"] Dec 04 17:55:10 crc kubenswrapper[4631]: I1204 17:55:10.070470 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1ba76133-7ea9-4b93-abdd-426b64c09c9d","Type":"ContainerStarted","Data":"91ffcf6c7ed56096e1a917667a13c15340fc45ef122ffde93ca7204ad8c91ad8"} Dec 04 17:55:10 crc kubenswrapper[4631]: I1204 17:55:10.073670 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9ef0c479-0169-423e-9619-fbf9f7e63a97","Type":"ContainerStarted","Data":"9fda7edce7699016c325d119bece221b9dccee1d157accf0189b34c01240a799"} Dec 04 17:55:10 crc kubenswrapper[4631]: I1204 17:55:10.075447 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" event={"ID":"a714bb29-1c3a-4c0f-abf9-0485d253a3ce","Type":"ContainerStarted","Data":"225b78cebb0ac2910496498506ba36e1c0d468a34219819cb5560e438f2d282c"} Dec 04 17:55:11 crc kubenswrapper[4631]: I1204 17:55:11.085862 4631 generic.go:334] "Generic (PLEG): container finished" podID="a714bb29-1c3a-4c0f-abf9-0485d253a3ce" containerID="fefff114ead879286581fa0d9f45665c56bcc72f4f5ad1b0d7ad63dafe8b8a76" exitCode=0 Dec 04 17:55:11 crc kubenswrapper[4631]: I1204 17:55:11.085923 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" event={"ID":"a714bb29-1c3a-4c0f-abf9-0485d253a3ce","Type":"ContainerDied","Data":"fefff114ead879286581fa0d9f45665c56bcc72f4f5ad1b0d7ad63dafe8b8a76"} Dec 04 17:55:12 crc kubenswrapper[4631]: I1204 17:55:12.095271 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" event={"ID":"a714bb29-1c3a-4c0f-abf9-0485d253a3ce","Type":"ContainerStarted","Data":"f68201039f667f6f94b85ddbaafb37bcd90b85b5e19d0fee132ca52e4c55970e"} Dec 04 17:55:12 crc kubenswrapper[4631]: I1204 17:55:12.096219 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:12 crc kubenswrapper[4631]: I1204 17:55:12.127140 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" podStartSLOduration=4.127122814 podStartE2EDuration="4.127122814s" podCreationTimestamp="2025-12-04 17:55:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:55:12.112437252 +0000 UTC m=+1642.144679260" watchObservedRunningTime="2025-12-04 17:55:12.127122814 +0000 UTC m=+1642.159364812" Dec 04 17:55:14 crc kubenswrapper[4631]: I1204 17:55:14.360744 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dmbbs" Dec 04 17:55:14 crc kubenswrapper[4631]: I1204 17:55:14.405101 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dmbbs" Dec 04 17:55:15 crc kubenswrapper[4631]: I1204 17:55:15.205600 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dmbbs"] Dec 04 17:55:16 crc kubenswrapper[4631]: I1204 17:55:16.131647 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dmbbs" 
podUID="249c0713-d835-4d5f-a14d-f0cc6371eac4" containerName="registry-server" containerID="cri-o://96732f397ddce18c0e020861671ba2319fb4f6e4e5df8c306d1cdf3be643c462" gracePeriod=2 Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.072800 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dmbbs" Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.124669 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/249c0713-d835-4d5f-a14d-f0cc6371eac4-catalog-content\") pod \"249c0713-d835-4d5f-a14d-f0cc6371eac4\" (UID: \"249c0713-d835-4d5f-a14d-f0cc6371eac4\") " Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.124728 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tp9rs\" (UniqueName: \"kubernetes.io/projected/249c0713-d835-4d5f-a14d-f0cc6371eac4-kube-api-access-tp9rs\") pod \"249c0713-d835-4d5f-a14d-f0cc6371eac4\" (UID: \"249c0713-d835-4d5f-a14d-f0cc6371eac4\") " Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.124749 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/249c0713-d835-4d5f-a14d-f0cc6371eac4-utilities\") pod \"249c0713-d835-4d5f-a14d-f0cc6371eac4\" (UID: \"249c0713-d835-4d5f-a14d-f0cc6371eac4\") " Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.125722 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/249c0713-d835-4d5f-a14d-f0cc6371eac4-utilities" (OuterVolumeSpecName: "utilities") pod "249c0713-d835-4d5f-a14d-f0cc6371eac4" (UID: "249c0713-d835-4d5f-a14d-f0cc6371eac4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.134092 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/249c0713-d835-4d5f-a14d-f0cc6371eac4-kube-api-access-tp9rs" (OuterVolumeSpecName: "kube-api-access-tp9rs") pod "249c0713-d835-4d5f-a14d-f0cc6371eac4" (UID: "249c0713-d835-4d5f-a14d-f0cc6371eac4"). InnerVolumeSpecName "kube-api-access-tp9rs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.145888 4631 generic.go:334] "Generic (PLEG): container finished" podID="249c0713-d835-4d5f-a14d-f0cc6371eac4" containerID="96732f397ddce18c0e020861671ba2319fb4f6e4e5df8c306d1cdf3be643c462" exitCode=0 Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.145930 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dmbbs" event={"ID":"249c0713-d835-4d5f-a14d-f0cc6371eac4","Type":"ContainerDied","Data":"96732f397ddce18c0e020861671ba2319fb4f6e4e5df8c306d1cdf3be643c462"} Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.145955 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dmbbs" event={"ID":"249c0713-d835-4d5f-a14d-f0cc6371eac4","Type":"ContainerDied","Data":"ea86015849a54c83a0c7027bdbd6b531c91145f8b81726d4bdd8ae79abb9a40f"} Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.145972 4631 scope.go:117] "RemoveContainer" containerID="96732f397ddce18c0e020861671ba2319fb4f6e4e5df8c306d1cdf3be643c462" Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.146087 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dmbbs" Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.187605 4631 scope.go:117] "RemoveContainer" containerID="3de639a0f785eb9ded5666eef74465f2f5828fae563d691d295df9706af3643b" Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.215508 4631 scope.go:117] "RemoveContainer" containerID="63ba5c58090e3b44170c85f90b7cd6757e29143e3c9029eed1627eef077b47f2" Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.226959 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tp9rs\" (UniqueName: \"kubernetes.io/projected/249c0713-d835-4d5f-a14d-f0cc6371eac4-kube-api-access-tp9rs\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.226988 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/249c0713-d835-4d5f-a14d-f0cc6371eac4-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.241678 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/249c0713-d835-4d5f-a14d-f0cc6371eac4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "249c0713-d835-4d5f-a14d-f0cc6371eac4" (UID: "249c0713-d835-4d5f-a14d-f0cc6371eac4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.265066 4631 scope.go:117] "RemoveContainer" containerID="96732f397ddce18c0e020861671ba2319fb4f6e4e5df8c306d1cdf3be643c462" Dec 04 17:55:17 crc kubenswrapper[4631]: E1204 17:55:17.266637 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96732f397ddce18c0e020861671ba2319fb4f6e4e5df8c306d1cdf3be643c462\": container with ID starting with 96732f397ddce18c0e020861671ba2319fb4f6e4e5df8c306d1cdf3be643c462 not found: ID does not exist" containerID="96732f397ddce18c0e020861671ba2319fb4f6e4e5df8c306d1cdf3be643c462" Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.266684 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96732f397ddce18c0e020861671ba2319fb4f6e4e5df8c306d1cdf3be643c462"} err="failed to get container status \"96732f397ddce18c0e020861671ba2319fb4f6e4e5df8c306d1cdf3be643c462\": rpc error: code = NotFound desc = could not find container \"96732f397ddce18c0e020861671ba2319fb4f6e4e5df8c306d1cdf3be643c462\": container with ID starting with 96732f397ddce18c0e020861671ba2319fb4f6e4e5df8c306d1cdf3be643c462 not found: ID does not exist" Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.266705 4631 scope.go:117] "RemoveContainer" containerID="3de639a0f785eb9ded5666eef74465f2f5828fae563d691d295df9706af3643b" Dec 04 17:55:17 crc kubenswrapper[4631]: E1204 17:55:17.267188 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3de639a0f785eb9ded5666eef74465f2f5828fae563d691d295df9706af3643b\": container with ID starting with 3de639a0f785eb9ded5666eef74465f2f5828fae563d691d295df9706af3643b not found: ID does not exist" containerID="3de639a0f785eb9ded5666eef74465f2f5828fae563d691d295df9706af3643b" Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.267211 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3de639a0f785eb9ded5666eef74465f2f5828fae563d691d295df9706af3643b"} err="failed to get 
container status \"3de639a0f785eb9ded5666eef74465f2f5828fae563d691d295df9706af3643b\": rpc error: code = NotFound desc = could not find container \"3de639a0f785eb9ded5666eef74465f2f5828fae563d691d295df9706af3643b\": container with ID starting with 3de639a0f785eb9ded5666eef74465f2f5828fae563d691d295df9706af3643b not found: ID does not exist" Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.267344 4631 scope.go:117] "RemoveContainer" containerID="63ba5c58090e3b44170c85f90b7cd6757e29143e3c9029eed1627eef077b47f2" Dec 04 17:55:17 crc kubenswrapper[4631]: E1204 17:55:17.268468 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63ba5c58090e3b44170c85f90b7cd6757e29143e3c9029eed1627eef077b47f2\": container with ID starting with 63ba5c58090e3b44170c85f90b7cd6757e29143e3c9029eed1627eef077b47f2 not found: ID does not exist" containerID="63ba5c58090e3b44170c85f90b7cd6757e29143e3c9029eed1627eef077b47f2" Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.268494 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63ba5c58090e3b44170c85f90b7cd6757e29143e3c9029eed1627eef077b47f2"} err="failed to get container status \"63ba5c58090e3b44170c85f90b7cd6757e29143e3c9029eed1627eef077b47f2\": rpc error: code = NotFound desc = could not find container \"63ba5c58090e3b44170c85f90b7cd6757e29143e3c9029eed1627eef077b47f2\": container with ID starting with 63ba5c58090e3b44170c85f90b7cd6757e29143e3c9029eed1627eef077b47f2 not found: ID does not exist" Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.328640 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/249c0713-d835-4d5f-a14d-f0cc6371eac4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.499517 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dmbbs"] Dec 04 17:55:17 crc kubenswrapper[4631]: I1204 17:55:17.512755 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dmbbs"] Dec 04 17:55:18 crc kubenswrapper[4631]: I1204 17:55:18.258196 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="249c0713-d835-4d5f-a14d-f0cc6371eac4" path="/var/lib/kubelet/pods/249c0713-d835-4d5f-a14d-f0cc6371eac4/volumes" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.239027 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" Dec 04 17:55:19 crc kubenswrapper[4631]: E1204 17:55:19.239616 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.376600 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.459108 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-p977t"] Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.459426 4631 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" podUID="308eec4b-712a-4be1-af17-846432557cfb" containerName="dnsmasq-dns" containerID="cri-o://7438a7e0f1afc74a44acb14c1f75a6ca23208ca2a6646225a9f779cb3d9b0a4b" gracePeriod=10 Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.647062 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7b659bdd7f-ndp7s"] Dec 04 17:55:19 crc kubenswrapper[4631]: E1204 17:55:19.647985 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="249c0713-d835-4d5f-a14d-f0cc6371eac4" containerName="extract-content" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.648003 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="249c0713-d835-4d5f-a14d-f0cc6371eac4" containerName="extract-content" Dec 04 17:55:19 crc kubenswrapper[4631]: E1204 17:55:19.648031 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="249c0713-d835-4d5f-a14d-f0cc6371eac4" containerName="extract-utilities" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.648038 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="249c0713-d835-4d5f-a14d-f0cc6371eac4" containerName="extract-utilities" Dec 04 17:55:19 crc kubenswrapper[4631]: E1204 17:55:19.648047 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="249c0713-d835-4d5f-a14d-f0cc6371eac4" containerName="registry-server" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.648052 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="249c0713-d835-4d5f-a14d-f0cc6371eac4" containerName="registry-server" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.648244 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="249c0713-d835-4d5f-a14d-f0cc6371eac4" containerName="registry-server" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.649168 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.673279 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b659bdd7f-ndp7s"] Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.679814 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/52592900-79a1-4fa6-8eb3-628f25972f5f-ovsdbserver-sb\") pod \"dnsmasq-dns-7b659bdd7f-ndp7s\" (UID: \"52592900-79a1-4fa6-8eb3-628f25972f5f\") " pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.679860 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/52592900-79a1-4fa6-8eb3-628f25972f5f-dns-svc\") pod \"dnsmasq-dns-7b659bdd7f-ndp7s\" (UID: \"52592900-79a1-4fa6-8eb3-628f25972f5f\") " pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.679909 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/52592900-79a1-4fa6-8eb3-628f25972f5f-dns-swift-storage-0\") pod \"dnsmasq-dns-7b659bdd7f-ndp7s\" (UID: \"52592900-79a1-4fa6-8eb3-628f25972f5f\") " pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.679933 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52592900-79a1-4fa6-8eb3-628f25972f5f-config\") pod \"dnsmasq-dns-7b659bdd7f-ndp7s\" (UID: \"52592900-79a1-4fa6-8eb3-628f25972f5f\") " pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.679976 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/52592900-79a1-4fa6-8eb3-628f25972f5f-openstack-edpm-ipam\") pod \"dnsmasq-dns-7b659bdd7f-ndp7s\" (UID: \"52592900-79a1-4fa6-8eb3-628f25972f5f\") " pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.680036 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96j7j\" (UniqueName: \"kubernetes.io/projected/52592900-79a1-4fa6-8eb3-628f25972f5f-kube-api-access-96j7j\") pod \"dnsmasq-dns-7b659bdd7f-ndp7s\" (UID: \"52592900-79a1-4fa6-8eb3-628f25972f5f\") " pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.680111 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/52592900-79a1-4fa6-8eb3-628f25972f5f-ovsdbserver-nb\") pod \"dnsmasq-dns-7b659bdd7f-ndp7s\" (UID: \"52592900-79a1-4fa6-8eb3-628f25972f5f\") " pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.782012 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/52592900-79a1-4fa6-8eb3-628f25972f5f-ovsdbserver-nb\") pod \"dnsmasq-dns-7b659bdd7f-ndp7s\" (UID: \"52592900-79a1-4fa6-8eb3-628f25972f5f\") " pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.782056 4631 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/52592900-79a1-4fa6-8eb3-628f25972f5f-ovsdbserver-sb\") pod \"dnsmasq-dns-7b659bdd7f-ndp7s\" (UID: \"52592900-79a1-4fa6-8eb3-628f25972f5f\") " pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.782078 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/52592900-79a1-4fa6-8eb3-628f25972f5f-dns-svc\") pod \"dnsmasq-dns-7b659bdd7f-ndp7s\" (UID: \"52592900-79a1-4fa6-8eb3-628f25972f5f\") " pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.782119 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/52592900-79a1-4fa6-8eb3-628f25972f5f-dns-swift-storage-0\") pod \"dnsmasq-dns-7b659bdd7f-ndp7s\" (UID: \"52592900-79a1-4fa6-8eb3-628f25972f5f\") " pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.782140 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52592900-79a1-4fa6-8eb3-628f25972f5f-config\") pod \"dnsmasq-dns-7b659bdd7f-ndp7s\" (UID: \"52592900-79a1-4fa6-8eb3-628f25972f5f\") " pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.782180 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/52592900-79a1-4fa6-8eb3-628f25972f5f-openstack-edpm-ipam\") pod \"dnsmasq-dns-7b659bdd7f-ndp7s\" (UID: \"52592900-79a1-4fa6-8eb3-628f25972f5f\") " pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.782228 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96j7j\" (UniqueName: \"kubernetes.io/projected/52592900-79a1-4fa6-8eb3-628f25972f5f-kube-api-access-96j7j\") pod \"dnsmasq-dns-7b659bdd7f-ndp7s\" (UID: \"52592900-79a1-4fa6-8eb3-628f25972f5f\") " pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.784840 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/52592900-79a1-4fa6-8eb3-628f25972f5f-ovsdbserver-sb\") pod \"dnsmasq-dns-7b659bdd7f-ndp7s\" (UID: \"52592900-79a1-4fa6-8eb3-628f25972f5f\") " pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.785340 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52592900-79a1-4fa6-8eb3-628f25972f5f-config\") pod \"dnsmasq-dns-7b659bdd7f-ndp7s\" (UID: \"52592900-79a1-4fa6-8eb3-628f25972f5f\") " pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.785908 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/52592900-79a1-4fa6-8eb3-628f25972f5f-dns-swift-storage-0\") pod \"dnsmasq-dns-7b659bdd7f-ndp7s\" (UID: \"52592900-79a1-4fa6-8eb3-628f25972f5f\") " pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.786175 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/52592900-79a1-4fa6-8eb3-628f25972f5f-openstack-edpm-ipam\") pod \"dnsmasq-dns-7b659bdd7f-ndp7s\" (UID: \"52592900-79a1-4fa6-8eb3-628f25972f5f\") " pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.786673 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/52592900-79a1-4fa6-8eb3-628f25972f5f-ovsdbserver-nb\") pod \"dnsmasq-dns-7b659bdd7f-ndp7s\" (UID: \"52592900-79a1-4fa6-8eb3-628f25972f5f\") " pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.786863 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/52592900-79a1-4fa6-8eb3-628f25972f5f-dns-svc\") pod \"dnsmasq-dns-7b659bdd7f-ndp7s\" (UID: \"52592900-79a1-4fa6-8eb3-628f25972f5f\") " pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.800261 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96j7j\" (UniqueName: \"kubernetes.io/projected/52592900-79a1-4fa6-8eb3-628f25972f5f-kube-api-access-96j7j\") pod \"dnsmasq-dns-7b659bdd7f-ndp7s\" (UID: \"52592900-79a1-4fa6-8eb3-628f25972f5f\") " pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:19 crc kubenswrapper[4631]: I1204 17:55:19.980295 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.075355 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.096946 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-dns-swift-storage-0\") pod \"308eec4b-712a-4be1-af17-846432557cfb\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.097058 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-dns-svc\") pod \"308eec4b-712a-4be1-af17-846432557cfb\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.097100 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7dvw4\" (UniqueName: \"kubernetes.io/projected/308eec4b-712a-4be1-af17-846432557cfb-kube-api-access-7dvw4\") pod \"308eec4b-712a-4be1-af17-846432557cfb\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.097163 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-ovsdbserver-nb\") pod \"308eec4b-712a-4be1-af17-846432557cfb\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.097216 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-ovsdbserver-sb\") pod \"308eec4b-712a-4be1-af17-846432557cfb\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") " 
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.097281 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-config\") pod \"308eec4b-712a-4be1-af17-846432557cfb\" (UID: \"308eec4b-712a-4be1-af17-846432557cfb\") "
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.105187 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308eec4b-712a-4be1-af17-846432557cfb-kube-api-access-7dvw4" (OuterVolumeSpecName: "kube-api-access-7dvw4") pod "308eec4b-712a-4be1-af17-846432557cfb" (UID: "308eec4b-712a-4be1-af17-846432557cfb"). InnerVolumeSpecName "kube-api-access-7dvw4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.176906 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "308eec4b-712a-4be1-af17-846432557cfb" (UID: "308eec4b-712a-4be1-af17-846432557cfb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.190484 4631 generic.go:334] "Generic (PLEG): container finished" podID="308eec4b-712a-4be1-af17-846432557cfb" containerID="7438a7e0f1afc74a44acb14c1f75a6ca23208ca2a6646225a9f779cb3d9b0a4b" exitCode=0
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.190518 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" event={"ID":"308eec4b-712a-4be1-af17-846432557cfb","Type":"ContainerDied","Data":"7438a7e0f1afc74a44acb14c1f75a6ca23208ca2a6646225a9f779cb3d9b0a4b"}
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.190554 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-p977t" event={"ID":"308eec4b-712a-4be1-af17-846432557cfb","Type":"ContainerDied","Data":"30d5c96d86a3d22d7a4a43d6327163b4d189aaf9c4dca499baa744d2432e0f61"}
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.190570 4631 scope.go:117] "RemoveContainer" containerID="7438a7e0f1afc74a44acb14c1f75a6ca23208ca2a6646225a9f779cb3d9b0a4b"
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.190689 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-p977t"
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.199241 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7dvw4\" (UniqueName: \"kubernetes.io/projected/308eec4b-712a-4be1-af17-846432557cfb-kube-api-access-7dvw4\") on node \"crc\" DevicePath \"\""
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.199265 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.211457 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "308eec4b-712a-4be1-af17-846432557cfb" (UID: "308eec4b-712a-4be1-af17-846432557cfb"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.211994 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "308eec4b-712a-4be1-af17-846432557cfb" (UID: "308eec4b-712a-4be1-af17-846432557cfb"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.228239 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "308eec4b-712a-4be1-af17-846432557cfb" (UID: "308eec4b-712a-4be1-af17-846432557cfb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.242296 4631 scope.go:117] "RemoveContainer" containerID="08377cf13fccbec161eadd9db0f1298e6eed7891df03115307a31df519942da3"
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.246220 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-config" (OuterVolumeSpecName: "config") pod "308eec4b-712a-4be1-af17-846432557cfb" (UID: "308eec4b-712a-4be1-af17-846432557cfb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.266846 4631 scope.go:117] "RemoveContainer" containerID="7438a7e0f1afc74a44acb14c1f75a6ca23208ca2a6646225a9f779cb3d9b0a4b"
Dec 04 17:55:20 crc kubenswrapper[4631]: E1204 17:55:20.270089 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7438a7e0f1afc74a44acb14c1f75a6ca23208ca2a6646225a9f779cb3d9b0a4b\": container with ID starting with 7438a7e0f1afc74a44acb14c1f75a6ca23208ca2a6646225a9f779cb3d9b0a4b not found: ID does not exist" containerID="7438a7e0f1afc74a44acb14c1f75a6ca23208ca2a6646225a9f779cb3d9b0a4b"
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.270184 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7438a7e0f1afc74a44acb14c1f75a6ca23208ca2a6646225a9f779cb3d9b0a4b"} err="failed to get container status \"7438a7e0f1afc74a44acb14c1f75a6ca23208ca2a6646225a9f779cb3d9b0a4b\": rpc error: code = NotFound desc = could not find container \"7438a7e0f1afc74a44acb14c1f75a6ca23208ca2a6646225a9f779cb3d9b0a4b\": container with ID starting with 7438a7e0f1afc74a44acb14c1f75a6ca23208ca2a6646225a9f779cb3d9b0a4b not found: ID does not exist"
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.270260 4631 scope.go:117] "RemoveContainer" containerID="08377cf13fccbec161eadd9db0f1298e6eed7891df03115307a31df519942da3"
Dec 04 17:55:20 crc kubenswrapper[4631]: E1204 17:55:20.270733 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08377cf13fccbec161eadd9db0f1298e6eed7891df03115307a31df519942da3\": container with ID starting with 08377cf13fccbec161eadd9db0f1298e6eed7891df03115307a31df519942da3 not found: ID does not exist" containerID="08377cf13fccbec161eadd9db0f1298e6eed7891df03115307a31df519942da3"
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.270761 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08377cf13fccbec161eadd9db0f1298e6eed7891df03115307a31df519942da3"} err="failed to get container status \"08377cf13fccbec161eadd9db0f1298e6eed7891df03115307a31df519942da3\": rpc error: code = NotFound desc = could not find container \"08377cf13fccbec161eadd9db0f1298e6eed7891df03115307a31df519942da3\": container with ID starting with 08377cf13fccbec161eadd9db0f1298e6eed7891df03115307a31df519942da3 not found: ID does not exist"
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.300902 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-config\") on node \"crc\" DevicePath \"\""
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.300946 4631 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.300960 4631 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.300972 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/308eec4b-712a-4be1-af17-846432557cfb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.304275 4631 scope.go:117] "RemoveContainer" containerID="455e1ee9ee6c43605f18f76355220fdd58d5b14ee824b79f6f5eac1cffd96926"
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.474463 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b659bdd7f-ndp7s"]
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.516808 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-p977t"]
Dec 04 17:55:20 crc kubenswrapper[4631]: I1204 17:55:20.525820 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-p977t"]
Dec 04 17:55:21 crc kubenswrapper[4631]: I1204 17:55:21.200082 4631 generic.go:334] "Generic (PLEG): container finished" podID="52592900-79a1-4fa6-8eb3-628f25972f5f" containerID="192892f10599304602bb2449d1a76423e8b95f2082c159373dc620e3c602e18a" exitCode=0
Dec 04 17:55:21 crc kubenswrapper[4631]: I1204 17:55:21.200185 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" event={"ID":"52592900-79a1-4fa6-8eb3-628f25972f5f","Type":"ContainerDied","Data":"192892f10599304602bb2449d1a76423e8b95f2082c159373dc620e3c602e18a"}
Dec 04 17:55:21 crc kubenswrapper[4631]: I1204 17:55:21.200609 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" event={"ID":"52592900-79a1-4fa6-8eb3-628f25972f5f","Type":"ContainerStarted","Data":"a0e2591cbf98ad700e5b7bbbab94e0e8717b1d9c57d2a651df957819196f860a"}
Dec 04 17:55:22 crc kubenswrapper[4631]: I1204 17:55:22.209805 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" event={"ID":"52592900-79a1-4fa6-8eb3-628f25972f5f","Type":"ContainerStarted","Data":"d50d295e8843d5d41c7cad0dce9bd7501d7b87f607ee50eca3fcaa7af7734ba1"}
Dec 04 17:55:22 crc kubenswrapper[4631]: I1204 17:55:22.210142 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s"
Dec 04 17:55:22 crc kubenswrapper[4631]: I1204 17:55:22.230213 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s" podStartSLOduration=3.230195327 podStartE2EDuration="3.230195327s" podCreationTimestamp="2025-12-04 17:55:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:55:22.227489042 +0000 UTC m=+1652.259731040" watchObservedRunningTime="2025-12-04 17:55:22.230195327 +0000 UTC m=+1652.262437325"
Dec 04 17:55:22 crc kubenswrapper[4631]: I1204 17:55:22.250148 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308eec4b-712a-4be1-af17-846432557cfb" path="/var/lib/kubelet/pods/308eec4b-712a-4be1-af17-846432557cfb/volumes"
Dec 04 17:55:29 crc kubenswrapper[4631]: I1204 17:55:29.981989 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7b659bdd7f-ndp7s"
Dec 04 17:55:30 crc kubenswrapper[4631]: I1204 17:55:30.092573 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-xq6rz"]
Dec 04 17:55:30 crc kubenswrapper[4631]: I1204 17:55:30.092800 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" podUID="a714bb29-1c3a-4c0f-abf9-0485d253a3ce" containerName="dnsmasq-dns" containerID="cri-o://f68201039f667f6f94b85ddbaafb37bcd90b85b5e19d0fee132ca52e4c55970e" gracePeriod=10
Dec 04 17:55:30 crc kubenswrapper[4631]: I1204 17:55:30.245634 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009"
Dec 04 17:55:30 crc kubenswrapper[4631]: E1204 17:55:30.245871 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 17:55:30 crc kubenswrapper[4631]: I1204 17:55:30.290114 4631 generic.go:334] "Generic (PLEG): container finished" podID="a714bb29-1c3a-4c0f-abf9-0485d253a3ce" containerID="f68201039f667f6f94b85ddbaafb37bcd90b85b5e19d0fee132ca52e4c55970e" exitCode=0
Dec 04 17:55:30 crc kubenswrapper[4631]: I1204 17:55:30.290150 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" event={"ID":"a714bb29-1c3a-4c0f-abf9-0485d253a3ce","Type":"ContainerDied","Data":"f68201039f667f6f94b85ddbaafb37bcd90b85b5e19d0fee132ca52e4c55970e"}
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.127227 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-xq6rz"
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.210344 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfk8r\" (UniqueName: \"kubernetes.io/projected/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-kube-api-access-tfk8r\") pod \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") "
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.210423 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-ovsdbserver-nb\") pod \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") "
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.210514 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-dns-swift-storage-0\") pod \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") "
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.210577 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-dns-svc\") pod \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") "
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.210608 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-config\") pod \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") "
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.210673 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-ovsdbserver-sb\") pod \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") "
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.210696 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-openstack-edpm-ipam\") pod \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\" (UID: \"a714bb29-1c3a-4c0f-abf9-0485d253a3ce\") "
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.231984 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-kube-api-access-tfk8r" (OuterVolumeSpecName: "kube-api-access-tfk8r") pod "a714bb29-1c3a-4c0f-abf9-0485d253a3ce" (UID: "a714bb29-1c3a-4c0f-abf9-0485d253a3ce"). InnerVolumeSpecName "kube-api-access-tfk8r". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.312584 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfk8r\" (UniqueName: \"kubernetes.io/projected/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-kube-api-access-tfk8r\") on node \"crc\" DevicePath \"\""
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.327938 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-xq6rz" event={"ID":"a714bb29-1c3a-4c0f-abf9-0485d253a3ce","Type":"ContainerDied","Data":"225b78cebb0ac2910496498506ba36e1c0d468a34219819cb5560e438f2d282c"}
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.327997 4631 scope.go:117] "RemoveContainer" containerID="f68201039f667f6f94b85ddbaafb37bcd90b85b5e19d0fee132ca52e4c55970e"
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.328182 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-xq6rz"
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.372074 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "a714bb29-1c3a-4c0f-abf9-0485d253a3ce" (UID: "a714bb29-1c3a-4c0f-abf9-0485d253a3ce"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.395530 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-config" (OuterVolumeSpecName: "config") pod "a714bb29-1c3a-4c0f-abf9-0485d253a3ce" (UID: "a714bb29-1c3a-4c0f-abf9-0485d253a3ce"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.411083 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a714bb29-1c3a-4c0f-abf9-0485d253a3ce" (UID: "a714bb29-1c3a-4c0f-abf9-0485d253a3ce"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.411270 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a714bb29-1c3a-4c0f-abf9-0485d253a3ce" (UID: "a714bb29-1c3a-4c0f-abf9-0485d253a3ce"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.411661 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a714bb29-1c3a-4c0f-abf9-0485d253a3ce" (UID: "a714bb29-1c3a-4c0f-abf9-0485d253a3ce"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.414075 4631 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.414101 4631 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-config\") on node \"crc\" DevicePath \"\""
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.414114 4631 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.414127 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.414144 4631 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.435346 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a714bb29-1c3a-4c0f-abf9-0485d253a3ce" (UID: "a714bb29-1c3a-4c0f-abf9-0485d253a3ce"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.515652 4631 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a714bb29-1c3a-4c0f-abf9-0485d253a3ce-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.517632 4631 scope.go:117] "RemoveContainer" containerID="fefff114ead879286581fa0d9f45665c56bcc72f4f5ad1b0d7ad63dafe8b8a76"
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.664008 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-xq6rz"]
Dec 04 17:55:31 crc kubenswrapper[4631]: I1204 17:55:31.672906 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-xq6rz"]
Dec 04 17:55:32 crc kubenswrapper[4631]: I1204 17:55:32.252064 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a714bb29-1c3a-4c0f-abf9-0485d253a3ce" path="/var/lib/kubelet/pods/a714bb29-1c3a-4c0f-abf9-0485d253a3ce/volumes"
Dec 04 17:55:41 crc kubenswrapper[4631]: I1204 17:55:41.418444 4631 generic.go:334] "Generic (PLEG): container finished" podID="1ba76133-7ea9-4b93-abdd-426b64c09c9d" containerID="91ffcf6c7ed56096e1a917667a13c15340fc45ef122ffde93ca7204ad8c91ad8" exitCode=0
Dec 04 17:55:41 crc kubenswrapper[4631]: I1204 17:55:41.418679 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1ba76133-7ea9-4b93-abdd-426b64c09c9d","Type":"ContainerDied","Data":"91ffcf6c7ed56096e1a917667a13c15340fc45ef122ffde93ca7204ad8c91ad8"}
Dec 04 17:55:42 crc kubenswrapper[4631]: I1204 17:55:42.430320 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1ba76133-7ea9-4b93-abdd-426b64c09c9d","Type":"ContainerStarted","Data":"81783bdbabe435cf27cec000018ec14c5390ea8553f2dfdc801f3f212b1289aa"}
Dec 04 17:55:42 crc kubenswrapper[4631]: I1204 17:55:42.432404 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Dec 04 17:55:42 crc kubenswrapper[4631]: I1204 17:55:42.436297 4631 generic.go:334] "Generic (PLEG): container finished" podID="9ef0c479-0169-423e-9619-fbf9f7e63a97" containerID="9fda7edce7699016c325d119bece221b9dccee1d157accf0189b34c01240a799" exitCode=0
Dec 04 17:55:42 crc kubenswrapper[4631]: I1204 17:55:42.436328 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9ef0c479-0169-423e-9619-fbf9f7e63a97","Type":"ContainerDied","Data":"9fda7edce7699016c325d119bece221b9dccee1d157accf0189b34c01240a799"}
Dec 04 17:55:42 crc kubenswrapper[4631]: I1204 17:55:42.465832 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=35.4658126 podStartE2EDuration="35.4658126s" podCreationTimestamp="2025-12-04 17:55:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:55:42.461686984 +0000 UTC m=+1672.493928982" watchObservedRunningTime="2025-12-04 17:55:42.4658126 +0000 UTC m=+1672.498054608"
Dec 04 17:55:44 crc kubenswrapper[4631]: I1204 17:55:44.239361 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009"
Dec 04 17:55:44 crc kubenswrapper[4631]: E1204 17:55:44.241112 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 17:55:44 crc kubenswrapper[4631]: I1204 17:55:44.461570 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"9ef0c479-0169-423e-9619-fbf9f7e63a97","Type":"ContainerStarted","Data":"d230015795ec2f6373ea809ffe3289f22783966e2024aa0a7290467613fabb0a"}
Dec 04 17:55:44 crc kubenswrapper[4631]: I1204 17:55:44.462617 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Dec 04 17:55:44 crc kubenswrapper[4631]: I1204 17:55:44.493000 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.492980331 podStartE2EDuration="37.492980331s" podCreationTimestamp="2025-12-04 17:55:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 17:55:44.481805147 +0000 UTC m=+1674.514047155" watchObservedRunningTime="2025-12-04 17:55:44.492980331 +0000 UTC m=+1674.525222329"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.329185 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg"]
Dec 04 17:55:53 crc kubenswrapper[4631]: E1204 17:55:53.330142 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="308eec4b-712a-4be1-af17-846432557cfb" containerName="init"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.330157 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="308eec4b-712a-4be1-af17-846432557cfb" containerName="init"
Dec 04 17:55:53 crc kubenswrapper[4631]: E1204 17:55:53.330172 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a714bb29-1c3a-4c0f-abf9-0485d253a3ce" containerName="dnsmasq-dns"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.330179 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a714bb29-1c3a-4c0f-abf9-0485d253a3ce" containerName="dnsmasq-dns"
Dec 04 17:55:53 crc kubenswrapper[4631]: E1204 17:55:53.330193 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="308eec4b-712a-4be1-af17-846432557cfb" containerName="dnsmasq-dns"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.330200 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="308eec4b-712a-4be1-af17-846432557cfb" containerName="dnsmasq-dns"
Dec 04 17:55:53 crc kubenswrapper[4631]: E1204 17:55:53.330211 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a714bb29-1c3a-4c0f-abf9-0485d253a3ce" containerName="init"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.330218 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a714bb29-1c3a-4c0f-abf9-0485d253a3ce" containerName="init"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.330465 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="308eec4b-712a-4be1-af17-846432557cfb" containerName="dnsmasq-dns"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.330494 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="a714bb29-1c3a-4c0f-abf9-0485d253a3ce" containerName="dnsmasq-dns"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.331721 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.334410 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.334870 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.335158 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-9284p"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.335592 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.362308 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg"]
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.511720 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/583c1d0c-fb4d-4d25-9d84-798d63586401-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg\" (UID: \"583c1d0c-fb4d-4d25-9d84-798d63586401\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.511811 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/583c1d0c-fb4d-4d25-9d84-798d63586401-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg\" (UID: \"583c1d0c-fb4d-4d25-9d84-798d63586401\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.511839 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/583c1d0c-fb4d-4d25-9d84-798d63586401-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg\" (UID: \"583c1d0c-fb4d-4d25-9d84-798d63586401\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.511963 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktdwt\" (UniqueName: \"kubernetes.io/projected/583c1d0c-fb4d-4d25-9d84-798d63586401-kube-api-access-ktdwt\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg\" (UID: \"583c1d0c-fb4d-4d25-9d84-798d63586401\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.613472 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktdwt\" (UniqueName: \"kubernetes.io/projected/583c1d0c-fb4d-4d25-9d84-798d63586401-kube-api-access-ktdwt\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg\" (UID: \"583c1d0c-fb4d-4d25-9d84-798d63586401\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.613541 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/583c1d0c-fb4d-4d25-9d84-798d63586401-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg\" (UID: \"583c1d0c-fb4d-4d25-9d84-798d63586401\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.613600 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/583c1d0c-fb4d-4d25-9d84-798d63586401-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg\" (UID: \"583c1d0c-fb4d-4d25-9d84-798d63586401\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.613624 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/583c1d0c-fb4d-4d25-9d84-798d63586401-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg\" (UID: \"583c1d0c-fb4d-4d25-9d84-798d63586401\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.620067 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/583c1d0c-fb4d-4d25-9d84-798d63586401-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg\" (UID: \"583c1d0c-fb4d-4d25-9d84-798d63586401\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.620552 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/583c1d0c-fb4d-4d25-9d84-798d63586401-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg\" (UID: \"583c1d0c-fb4d-4d25-9d84-798d63586401\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.620913 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/583c1d0c-fb4d-4d25-9d84-798d63586401-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg\" (UID: \"583c1d0c-fb4d-4d25-9d84-798d63586401\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.629627 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktdwt\" (UniqueName: \"kubernetes.io/projected/583c1d0c-fb4d-4d25-9d84-798d63586401-kube-api-access-ktdwt\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg\" (UID: \"583c1d0c-fb4d-4d25-9d84-798d63586401\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg"
Dec 04 17:55:53 crc kubenswrapper[4631]: I1204 17:55:53.657757 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg"
Dec 04 17:55:54 crc kubenswrapper[4631]: I1204 17:55:54.267903 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg"]
Dec 04 17:55:54 crc kubenswrapper[4631]: I1204 17:55:54.540578 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg" event={"ID":"583c1d0c-fb4d-4d25-9d84-798d63586401","Type":"ContainerStarted","Data":"5dcb60faf1ed9495373158f76ccd4b073d3bf8b2d76bcf791813fefa0ca7d1c3"}
Dec 04 17:55:56 crc kubenswrapper[4631]: I1204 17:55:56.243364 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009"
Dec 04 17:55:56 crc kubenswrapper[4631]: E1204 17:55:56.244028 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 17:55:57 crc kubenswrapper[4631]: I1204 17:55:57.624582 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Dec 04 17:55:57 crc kubenswrapper[4631]: I1204 17:55:57.804557 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Dec 04 17:55:59 crc kubenswrapper[4631]: I1204 17:55:59.389398 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-t6slc"]
Dec 04 17:55:59 crc kubenswrapper[4631]: I1204 17:55:59.394200 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-t6slc" Dec 04 17:55:59 crc kubenswrapper[4631]: I1204 17:55:59.529594 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a0fb6e6-9a3b-44c6-b820-28edae447393-catalog-content\") pod \"community-operators-t6slc\" (UID: \"7a0fb6e6-9a3b-44c6-b820-28edae447393\") " pod="openshift-marketplace/community-operators-t6slc" Dec 04 17:55:59 crc kubenswrapper[4631]: I1204 17:55:59.529662 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a0fb6e6-9a3b-44c6-b820-28edae447393-utilities\") pod \"community-operators-t6slc\" (UID: \"7a0fb6e6-9a3b-44c6-b820-28edae447393\") " pod="openshift-marketplace/community-operators-t6slc" Dec 04 17:55:59 crc kubenswrapper[4631]: I1204 17:55:59.529720 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f59wt\" (UniqueName: \"kubernetes.io/projected/7a0fb6e6-9a3b-44c6-b820-28edae447393-kube-api-access-f59wt\") pod \"community-operators-t6slc\" (UID: \"7a0fb6e6-9a3b-44c6-b820-28edae447393\") " pod="openshift-marketplace/community-operators-t6slc" Dec 04 17:55:59 crc kubenswrapper[4631]: I1204 17:55:59.631984 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a0fb6e6-9a3b-44c6-b820-28edae447393-catalog-content\") pod \"community-operators-t6slc\" (UID: \"7a0fb6e6-9a3b-44c6-b820-28edae447393\") " pod="openshift-marketplace/community-operators-t6slc" Dec 04 17:55:59 crc kubenswrapper[4631]: I1204 17:55:59.632083 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a0fb6e6-9a3b-44c6-b820-28edae447393-utilities\") pod \"community-operators-t6slc\" (UID: \"7a0fb6e6-9a3b-44c6-b820-28edae447393\") " pod="openshift-marketplace/community-operators-t6slc" Dec 04 17:55:59 crc kubenswrapper[4631]: I1204 17:55:59.632511 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a0fb6e6-9a3b-44c6-b820-28edae447393-catalog-content\") pod \"community-operators-t6slc\" (UID: \"7a0fb6e6-9a3b-44c6-b820-28edae447393\") " pod="openshift-marketplace/community-operators-t6slc" Dec 04 17:55:59 crc kubenswrapper[4631]: I1204 17:55:59.632670 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a0fb6e6-9a3b-44c6-b820-28edae447393-utilities\") pod \"community-operators-t6slc\" (UID: \"7a0fb6e6-9a3b-44c6-b820-28edae447393\") " pod="openshift-marketplace/community-operators-t6slc" Dec 04 17:55:59 crc kubenswrapper[4631]: I1204 17:55:59.632847 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f59wt\" (UniqueName: \"kubernetes.io/projected/7a0fb6e6-9a3b-44c6-b820-28edae447393-kube-api-access-f59wt\") pod \"community-operators-t6slc\" (UID: \"7a0fb6e6-9a3b-44c6-b820-28edae447393\") " pod="openshift-marketplace/community-operators-t6slc" Dec 04 17:55:59 crc kubenswrapper[4631]: I1204 17:55:59.653542 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f59wt\" (UniqueName: \"kubernetes.io/projected/7a0fb6e6-9a3b-44c6-b820-28edae447393-kube-api-access-f59wt\") pod 
\"community-operators-t6slc\" (UID: \"7a0fb6e6-9a3b-44c6-b820-28edae447393\") " pod="openshift-marketplace/community-operators-t6slc" Dec 04 17:55:59 crc kubenswrapper[4631]: I1204 17:55:59.722799 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-t6slc" Dec 04 17:56:00 crc kubenswrapper[4631]: I1204 17:56:00.388943 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-t6slc"] Dec 04 17:56:08 crc kubenswrapper[4631]: I1204 17:56:08.240080 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" Dec 04 17:56:08 crc kubenswrapper[4631]: E1204 17:56:08.240906 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 17:56:18 crc kubenswrapper[4631]: I1204 17:56:18.854863 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-t6slc"] Dec 04 17:56:19 crc kubenswrapper[4631]: I1204 17:56:19.785094 4631 generic.go:334] "Generic (PLEG): container finished" podID="7a0fb6e6-9a3b-44c6-b820-28edae447393" containerID="c1e8f42ff73898333d1a34444fa559c86f0eb9ddc9a741a58c49cabd5c7bdcbb" exitCode=0 Dec 04 17:56:19 crc kubenswrapper[4631]: I1204 17:56:19.785542 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t6slc" event={"ID":"7a0fb6e6-9a3b-44c6-b820-28edae447393","Type":"ContainerDied","Data":"c1e8f42ff73898333d1a34444fa559c86f0eb9ddc9a741a58c49cabd5c7bdcbb"} Dec 04 17:56:19 crc kubenswrapper[4631]: I1204 17:56:19.785575 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t6slc" event={"ID":"7a0fb6e6-9a3b-44c6-b820-28edae447393","Type":"ContainerStarted","Data":"b8171b08a6f3058e6001beb0c6cf12c02762f116a72322e31c370ef9c7445a63"} Dec 04 17:56:19 crc kubenswrapper[4631]: I1204 17:56:19.788729 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg" event={"ID":"583c1d0c-fb4d-4d25-9d84-798d63586401","Type":"ContainerStarted","Data":"576dfe827077af5bc4cfa1cf2af48f6cc8031046101a5da02804df5d8f9203c5"} Dec 04 17:56:19 crc kubenswrapper[4631]: I1204 17:56:19.837671 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg" podStartSLOduration=2.340384655 podStartE2EDuration="26.837650714s" podCreationTimestamp="2025-12-04 17:55:53 +0000 UTC" firstStartedPulling="2025-12-04 17:55:54.275236723 +0000 UTC m=+1684.307478711" lastFinishedPulling="2025-12-04 17:56:18.772502772 +0000 UTC m=+1708.804744770" observedRunningTime="2025-12-04 17:56:19.821987695 +0000 UTC m=+1709.854229703" watchObservedRunningTime="2025-12-04 17:56:19.837650714 +0000 UTC m=+1709.869892722" Dec 04 17:56:20 crc kubenswrapper[4631]: I1204 17:56:20.519034 4631 scope.go:117] "RemoveContainer" containerID="0a25248f2c8ff50b9a0cbd5633bd21d26b2a168752d964bef12dbce9e39e5236" Dec 04 17:56:21 crc kubenswrapper[4631]: I1204 17:56:21.239653 4631 scope.go:117] "RemoveContainer" 
containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" Dec 04 17:56:21 crc kubenswrapper[4631]: E1204 17:56:21.240092 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 17:56:21 crc kubenswrapper[4631]: I1204 17:56:21.806824 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t6slc" event={"ID":"7a0fb6e6-9a3b-44c6-b820-28edae447393","Type":"ContainerStarted","Data":"6e23bb15bd0b4dfd8833356da670b505f53aa14a3fb2f776cc090e34ab26b4c4"} Dec 04 17:56:22 crc kubenswrapper[4631]: I1204 17:56:22.820806 4631 generic.go:334] "Generic (PLEG): container finished" podID="7a0fb6e6-9a3b-44c6-b820-28edae447393" containerID="6e23bb15bd0b4dfd8833356da670b505f53aa14a3fb2f776cc090e34ab26b4c4" exitCode=0 Dec 04 17:56:22 crc kubenswrapper[4631]: I1204 17:56:22.820891 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t6slc" event={"ID":"7a0fb6e6-9a3b-44c6-b820-28edae447393","Type":"ContainerDied","Data":"6e23bb15bd0b4dfd8833356da670b505f53aa14a3fb2f776cc090e34ab26b4c4"} Dec 04 17:56:23 crc kubenswrapper[4631]: I1204 17:56:23.831782 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t6slc" event={"ID":"7a0fb6e6-9a3b-44c6-b820-28edae447393","Type":"ContainerStarted","Data":"c3f81b3e490bd83ccd8dc56a8a43b1359241cc80e3c51fbfa2410cf7a3dbc25c"} Dec 04 17:56:23 crc kubenswrapper[4631]: I1204 17:56:23.849980 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-t6slc" podStartSLOduration=21.255394154 podStartE2EDuration="24.849965836s" podCreationTimestamp="2025-12-04 17:55:59 +0000 UTC" firstStartedPulling="2025-12-04 17:56:19.7900859 +0000 UTC m=+1709.822327898" lastFinishedPulling="2025-12-04 17:56:23.384657582 +0000 UTC m=+1713.416899580" observedRunningTime="2025-12-04 17:56:23.847555169 +0000 UTC m=+1713.879797167" watchObservedRunningTime="2025-12-04 17:56:23.849965836 +0000 UTC m=+1713.882207834" Dec 04 17:56:29 crc kubenswrapper[4631]: I1204 17:56:29.724379 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-t6slc" Dec 04 17:56:29 crc kubenswrapper[4631]: I1204 17:56:29.724764 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-t6slc" Dec 04 17:56:29 crc kubenswrapper[4631]: I1204 17:56:29.768494 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-t6slc" Dec 04 17:56:29 crc kubenswrapper[4631]: I1204 17:56:29.926099 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-t6slc" Dec 04 17:56:30 crc kubenswrapper[4631]: I1204 17:56:30.587764 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-t6slc"] Dec 04 17:56:30 crc kubenswrapper[4631]: I1204 17:56:30.890775 4631 generic.go:334] "Generic (PLEG): container finished" podID="583c1d0c-fb4d-4d25-9d84-798d63586401" 
containerID="576dfe827077af5bc4cfa1cf2af48f6cc8031046101a5da02804df5d8f9203c5" exitCode=0 Dec 04 17:56:30 crc kubenswrapper[4631]: I1204 17:56:30.890847 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg" event={"ID":"583c1d0c-fb4d-4d25-9d84-798d63586401","Type":"ContainerDied","Data":"576dfe827077af5bc4cfa1cf2af48f6cc8031046101a5da02804df5d8f9203c5"} Dec 04 17:56:31 crc kubenswrapper[4631]: I1204 17:56:31.898672 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-t6slc" podUID="7a0fb6e6-9a3b-44c6-b820-28edae447393" containerName="registry-server" containerID="cri-o://c3f81b3e490bd83ccd8dc56a8a43b1359241cc80e3c51fbfa2410cf7a3dbc25c" gracePeriod=2 Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.369163 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg" Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.376282 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-t6slc" Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.464301 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/583c1d0c-fb4d-4d25-9d84-798d63586401-ssh-key\") pod \"583c1d0c-fb4d-4d25-9d84-798d63586401\" (UID: \"583c1d0c-fb4d-4d25-9d84-798d63586401\") " Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.464737 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/583c1d0c-fb4d-4d25-9d84-798d63586401-inventory\") pod \"583c1d0c-fb4d-4d25-9d84-798d63586401\" (UID: \"583c1d0c-fb4d-4d25-9d84-798d63586401\") " Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.464872 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f59wt\" (UniqueName: \"kubernetes.io/projected/7a0fb6e6-9a3b-44c6-b820-28edae447393-kube-api-access-f59wt\") pod \"7a0fb6e6-9a3b-44c6-b820-28edae447393\" (UID: \"7a0fb6e6-9a3b-44c6-b820-28edae447393\") " Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.464941 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a0fb6e6-9a3b-44c6-b820-28edae447393-catalog-content\") pod \"7a0fb6e6-9a3b-44c6-b820-28edae447393\" (UID: \"7a0fb6e6-9a3b-44c6-b820-28edae447393\") " Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.464977 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/583c1d0c-fb4d-4d25-9d84-798d63586401-repo-setup-combined-ca-bundle\") pod \"583c1d0c-fb4d-4d25-9d84-798d63586401\" (UID: \"583c1d0c-fb4d-4d25-9d84-798d63586401\") " Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.464996 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a0fb6e6-9a3b-44c6-b820-28edae447393-utilities\") pod \"7a0fb6e6-9a3b-44c6-b820-28edae447393\" (UID: \"7a0fb6e6-9a3b-44c6-b820-28edae447393\") " Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.465012 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktdwt\" (UniqueName: 
\"kubernetes.io/projected/583c1d0c-fb4d-4d25-9d84-798d63586401-kube-api-access-ktdwt\") pod \"583c1d0c-fb4d-4d25-9d84-798d63586401\" (UID: \"583c1d0c-fb4d-4d25-9d84-798d63586401\") " Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.467614 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a0fb6e6-9a3b-44c6-b820-28edae447393-utilities" (OuterVolumeSpecName: "utilities") pod "7a0fb6e6-9a3b-44c6-b820-28edae447393" (UID: "7a0fb6e6-9a3b-44c6-b820-28edae447393"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.470858 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a0fb6e6-9a3b-44c6-b820-28edae447393-kube-api-access-f59wt" (OuterVolumeSpecName: "kube-api-access-f59wt") pod "7a0fb6e6-9a3b-44c6-b820-28edae447393" (UID: "7a0fb6e6-9a3b-44c6-b820-28edae447393"). InnerVolumeSpecName "kube-api-access-f59wt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.471001 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/583c1d0c-fb4d-4d25-9d84-798d63586401-kube-api-access-ktdwt" (OuterVolumeSpecName: "kube-api-access-ktdwt") pod "583c1d0c-fb4d-4d25-9d84-798d63586401" (UID: "583c1d0c-fb4d-4d25-9d84-798d63586401"). InnerVolumeSpecName "kube-api-access-ktdwt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.473445 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/583c1d0c-fb4d-4d25-9d84-798d63586401-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "583c1d0c-fb4d-4d25-9d84-798d63586401" (UID: "583c1d0c-fb4d-4d25-9d84-798d63586401"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.514134 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/583c1d0c-fb4d-4d25-9d84-798d63586401-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "583c1d0c-fb4d-4d25-9d84-798d63586401" (UID: "583c1d0c-fb4d-4d25-9d84-798d63586401"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.535901 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a0fb6e6-9a3b-44c6-b820-28edae447393-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7a0fb6e6-9a3b-44c6-b820-28edae447393" (UID: "7a0fb6e6-9a3b-44c6-b820-28edae447393"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.536265 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/583c1d0c-fb4d-4d25-9d84-798d63586401-inventory" (OuterVolumeSpecName: "inventory") pod "583c1d0c-fb4d-4d25-9d84-798d63586401" (UID: "583c1d0c-fb4d-4d25-9d84-798d63586401"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.566893 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a0fb6e6-9a3b-44c6-b820-28edae447393-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.566926 4631 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/583c1d0c-fb4d-4d25-9d84-798d63586401-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.566940 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktdwt\" (UniqueName: \"kubernetes.io/projected/583c1d0c-fb4d-4d25-9d84-798d63586401-kube-api-access-ktdwt\") on node \"crc\" DevicePath \"\"" Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.566951 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a0fb6e6-9a3b-44c6-b820-28edae447393-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.566959 4631 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/583c1d0c-fb4d-4d25-9d84-798d63586401-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.566969 4631 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/583c1d0c-fb4d-4d25-9d84-798d63586401-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.566980 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f59wt\" (UniqueName: \"kubernetes.io/projected/7a0fb6e6-9a3b-44c6-b820-28edae447393-kube-api-access-f59wt\") on node \"crc\" DevicePath \"\"" Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.910982 4631 generic.go:334] "Generic (PLEG): container finished" podID="7a0fb6e6-9a3b-44c6-b820-28edae447393" containerID="c3f81b3e490bd83ccd8dc56a8a43b1359241cc80e3c51fbfa2410cf7a3dbc25c" exitCode=0 Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.911067 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-t6slc" Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.911086 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t6slc" event={"ID":"7a0fb6e6-9a3b-44c6-b820-28edae447393","Type":"ContainerDied","Data":"c3f81b3e490bd83ccd8dc56a8a43b1359241cc80e3c51fbfa2410cf7a3dbc25c"} Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.911127 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t6slc" event={"ID":"7a0fb6e6-9a3b-44c6-b820-28edae447393","Type":"ContainerDied","Data":"b8171b08a6f3058e6001beb0c6cf12c02762f116a72322e31c370ef9c7445a63"} Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.911154 4631 scope.go:117] "RemoveContainer" containerID="c3f81b3e490bd83ccd8dc56a8a43b1359241cc80e3c51fbfa2410cf7a3dbc25c" Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.914957 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg" event={"ID":"583c1d0c-fb4d-4d25-9d84-798d63586401","Type":"ContainerDied","Data":"5dcb60faf1ed9495373158f76ccd4b073d3bf8b2d76bcf791813fefa0ca7d1c3"} Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.914997 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5dcb60faf1ed9495373158f76ccd4b073d3bf8b2d76bcf791813fefa0ca7d1c3" Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.916349 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg" Dec 04 17:56:32 crc kubenswrapper[4631]: I1204 17:56:32.979810 4631 scope.go:117] "RemoveContainer" containerID="6e23bb15bd0b4dfd8833356da670b505f53aa14a3fb2f776cc090e34ab26b4c4" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.000889 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-t6slc"] Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.015501 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-t6slc"] Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.024066 4631 scope.go:117] "RemoveContainer" containerID="c1e8f42ff73898333d1a34444fa559c86f0eb9ddc9a741a58c49cabd5c7bdcbb" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.053419 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-5j289"] Dec 04 17:56:33 crc kubenswrapper[4631]: E1204 17:56:33.053883 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="583c1d0c-fb4d-4d25-9d84-798d63586401" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.053908 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="583c1d0c-fb4d-4d25-9d84-798d63586401" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 04 17:56:33 crc kubenswrapper[4631]: E1204 17:56:33.053932 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a0fb6e6-9a3b-44c6-b820-28edae447393" containerName="extract-content" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.053941 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a0fb6e6-9a3b-44c6-b820-28edae447393" containerName="extract-content" Dec 04 17:56:33 crc kubenswrapper[4631]: E1204 17:56:33.053966 4631 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="7a0fb6e6-9a3b-44c6-b820-28edae447393" containerName="registry-server" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.053974 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a0fb6e6-9a3b-44c6-b820-28edae447393" containerName="registry-server" Dec 04 17:56:33 crc kubenswrapper[4631]: E1204 17:56:33.053996 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a0fb6e6-9a3b-44c6-b820-28edae447393" containerName="extract-utilities" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.054004 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a0fb6e6-9a3b-44c6-b820-28edae447393" containerName="extract-utilities" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.054221 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="583c1d0c-fb4d-4d25-9d84-798d63586401" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.054247 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a0fb6e6-9a3b-44c6-b820-28edae447393" containerName="registry-server" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.055096 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5j289" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.062394 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.062829 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-9284p" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.063221 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.063533 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.075109 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-5j289"] Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.109353 4631 scope.go:117] "RemoveContainer" containerID="c3f81b3e490bd83ccd8dc56a8a43b1359241cc80e3c51fbfa2410cf7a3dbc25c" Dec 04 17:56:33 crc kubenswrapper[4631]: E1204 17:56:33.109846 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3f81b3e490bd83ccd8dc56a8a43b1359241cc80e3c51fbfa2410cf7a3dbc25c\": container with ID starting with c3f81b3e490bd83ccd8dc56a8a43b1359241cc80e3c51fbfa2410cf7a3dbc25c not found: ID does not exist" containerID="c3f81b3e490bd83ccd8dc56a8a43b1359241cc80e3c51fbfa2410cf7a3dbc25c" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.109905 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3f81b3e490bd83ccd8dc56a8a43b1359241cc80e3c51fbfa2410cf7a3dbc25c"} err="failed to get container status \"c3f81b3e490bd83ccd8dc56a8a43b1359241cc80e3c51fbfa2410cf7a3dbc25c\": rpc error: code = NotFound desc = could not find container \"c3f81b3e490bd83ccd8dc56a8a43b1359241cc80e3c51fbfa2410cf7a3dbc25c\": container with ID starting with c3f81b3e490bd83ccd8dc56a8a43b1359241cc80e3c51fbfa2410cf7a3dbc25c not found: ID does not exist" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.109935 4631 scope.go:117] "RemoveContainer" 
containerID="6e23bb15bd0b4dfd8833356da670b505f53aa14a3fb2f776cc090e34ab26b4c4" Dec 04 17:56:33 crc kubenswrapper[4631]: E1204 17:56:33.110239 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e23bb15bd0b4dfd8833356da670b505f53aa14a3fb2f776cc090e34ab26b4c4\": container with ID starting with 6e23bb15bd0b4dfd8833356da670b505f53aa14a3fb2f776cc090e34ab26b4c4 not found: ID does not exist" containerID="6e23bb15bd0b4dfd8833356da670b505f53aa14a3fb2f776cc090e34ab26b4c4" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.110263 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e23bb15bd0b4dfd8833356da670b505f53aa14a3fb2f776cc090e34ab26b4c4"} err="failed to get container status \"6e23bb15bd0b4dfd8833356da670b505f53aa14a3fb2f776cc090e34ab26b4c4\": rpc error: code = NotFound desc = could not find container \"6e23bb15bd0b4dfd8833356da670b505f53aa14a3fb2f776cc090e34ab26b4c4\": container with ID starting with 6e23bb15bd0b4dfd8833356da670b505f53aa14a3fb2f776cc090e34ab26b4c4 not found: ID does not exist" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.110306 4631 scope.go:117] "RemoveContainer" containerID="c1e8f42ff73898333d1a34444fa559c86f0eb9ddc9a741a58c49cabd5c7bdcbb" Dec 04 17:56:33 crc kubenswrapper[4631]: E1204 17:56:33.110919 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1e8f42ff73898333d1a34444fa559c86f0eb9ddc9a741a58c49cabd5c7bdcbb\": container with ID starting with c1e8f42ff73898333d1a34444fa559c86f0eb9ddc9a741a58c49cabd5c7bdcbb not found: ID does not exist" containerID="c1e8f42ff73898333d1a34444fa559c86f0eb9ddc9a741a58c49cabd5c7bdcbb" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.110943 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1e8f42ff73898333d1a34444fa559c86f0eb9ddc9a741a58c49cabd5c7bdcbb"} err="failed to get container status \"c1e8f42ff73898333d1a34444fa559c86f0eb9ddc9a741a58c49cabd5c7bdcbb\": rpc error: code = NotFound desc = could not find container \"c1e8f42ff73898333d1a34444fa559c86f0eb9ddc9a741a58c49cabd5c7bdcbb\": container with ID starting with c1e8f42ff73898333d1a34444fa559c86f0eb9ddc9a741a58c49cabd5c7bdcbb not found: ID does not exist" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.193398 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8vmd\" (UniqueName: \"kubernetes.io/projected/71ccfaca-2557-4840-941c-a36d55ebd0bc-kube-api-access-s8vmd\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5j289\" (UID: \"71ccfaca-2557-4840-941c-a36d55ebd0bc\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5j289" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.193465 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71ccfaca-2557-4840-941c-a36d55ebd0bc-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5j289\" (UID: \"71ccfaca-2557-4840-941c-a36d55ebd0bc\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5j289" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.193689 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71ccfaca-2557-4840-941c-a36d55ebd0bc-ssh-key\") pod 
\"redhat-edpm-deployment-openstack-edpm-ipam-5j289\" (UID: \"71ccfaca-2557-4840-941c-a36d55ebd0bc\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5j289" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.295707 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71ccfaca-2557-4840-941c-a36d55ebd0bc-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5j289\" (UID: \"71ccfaca-2557-4840-941c-a36d55ebd0bc\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5j289" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.295781 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8vmd\" (UniqueName: \"kubernetes.io/projected/71ccfaca-2557-4840-941c-a36d55ebd0bc-kube-api-access-s8vmd\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5j289\" (UID: \"71ccfaca-2557-4840-941c-a36d55ebd0bc\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5j289" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.295811 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71ccfaca-2557-4840-941c-a36d55ebd0bc-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5j289\" (UID: \"71ccfaca-2557-4840-941c-a36d55ebd0bc\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5j289" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.301262 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71ccfaca-2557-4840-941c-a36d55ebd0bc-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5j289\" (UID: \"71ccfaca-2557-4840-941c-a36d55ebd0bc\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5j289" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.312433 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71ccfaca-2557-4840-941c-a36d55ebd0bc-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5j289\" (UID: \"71ccfaca-2557-4840-941c-a36d55ebd0bc\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5j289" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.320501 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8vmd\" (UniqueName: \"kubernetes.io/projected/71ccfaca-2557-4840-941c-a36d55ebd0bc-kube-api-access-s8vmd\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5j289\" (UID: \"71ccfaca-2557-4840-941c-a36d55ebd0bc\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5j289" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.428461 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5j289" Dec 04 17:56:33 crc kubenswrapper[4631]: I1204 17:56:33.947870 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-5j289"] Dec 04 17:56:34 crc kubenswrapper[4631]: I1204 17:56:34.249492 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a0fb6e6-9a3b-44c6-b820-28edae447393" path="/var/lib/kubelet/pods/7a0fb6e6-9a3b-44c6-b820-28edae447393/volumes" Dec 04 17:56:34 crc kubenswrapper[4631]: I1204 17:56:34.934018 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5j289" event={"ID":"71ccfaca-2557-4840-941c-a36d55ebd0bc","Type":"ContainerStarted","Data":"a489da60446c85016acd02c008873cc3795ce6e8779ffc7d9c3f5b2c0716cb96"} Dec 04 17:56:34 crc kubenswrapper[4631]: I1204 17:56:34.934399 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5j289" event={"ID":"71ccfaca-2557-4840-941c-a36d55ebd0bc","Type":"ContainerStarted","Data":"1173591c946fb1f1b1e8e2b5e873b042b10f20f045d9ce077975ca3b492e6d1e"} Dec 04 17:56:34 crc kubenswrapper[4631]: I1204 17:56:34.951938 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5j289" podStartSLOduration=2.602556752 podStartE2EDuration="2.951919533s" podCreationTimestamp="2025-12-04 17:56:32 +0000 UTC" firstStartedPulling="2025-12-04 17:56:33.94988042 +0000 UTC m=+1723.982122418" lastFinishedPulling="2025-12-04 17:56:34.299243201 +0000 UTC m=+1724.331485199" observedRunningTime="2025-12-04 17:56:34.95073239 +0000 UTC m=+1724.982974388" watchObservedRunningTime="2025-12-04 17:56:34.951919533 +0000 UTC m=+1724.984161531" Dec 04 17:56:35 crc kubenswrapper[4631]: I1204 17:56:35.239996 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" Dec 04 17:56:35 crc kubenswrapper[4631]: E1204 17:56:35.240291 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 17:56:37 crc kubenswrapper[4631]: I1204 17:56:37.962785 4631 generic.go:334] "Generic (PLEG): container finished" podID="71ccfaca-2557-4840-941c-a36d55ebd0bc" containerID="a489da60446c85016acd02c008873cc3795ce6e8779ffc7d9c3f5b2c0716cb96" exitCode=0 Dec 04 17:56:37 crc kubenswrapper[4631]: I1204 17:56:37.962900 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5j289" event={"ID":"71ccfaca-2557-4840-941c-a36d55ebd0bc","Type":"ContainerDied","Data":"a489da60446c85016acd02c008873cc3795ce6e8779ffc7d9c3f5b2c0716cb96"} Dec 04 17:56:40 crc kubenswrapper[4631]: I1204 17:56:40.003769 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5j289" event={"ID":"71ccfaca-2557-4840-941c-a36d55ebd0bc","Type":"ContainerDied","Data":"1173591c946fb1f1b1e8e2b5e873b042b10f20f045d9ce077975ca3b492e6d1e"} Dec 04 17:56:40 crc kubenswrapper[4631]: I1204 17:56:40.004463 4631 pod_container_deletor.go:80] "Container not found in 
pod's containers" containerID="1173591c946fb1f1b1e8e2b5e873b042b10f20f045d9ce077975ca3b492e6d1e" Dec 04 17:56:40 crc kubenswrapper[4631]: I1204 17:56:40.077203 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5j289" Dec 04 17:56:40 crc kubenswrapper[4631]: I1204 17:56:40.233581 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71ccfaca-2557-4840-941c-a36d55ebd0bc-inventory\") pod \"71ccfaca-2557-4840-941c-a36d55ebd0bc\" (UID: \"71ccfaca-2557-4840-941c-a36d55ebd0bc\") " Dec 04 17:56:40 crc kubenswrapper[4631]: I1204 17:56:40.233689 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s8vmd\" (UniqueName: \"kubernetes.io/projected/71ccfaca-2557-4840-941c-a36d55ebd0bc-kube-api-access-s8vmd\") pod \"71ccfaca-2557-4840-941c-a36d55ebd0bc\" (UID: \"71ccfaca-2557-4840-941c-a36d55ebd0bc\") " Dec 04 17:56:40 crc kubenswrapper[4631]: I1204 17:56:40.233747 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71ccfaca-2557-4840-941c-a36d55ebd0bc-ssh-key\") pod \"71ccfaca-2557-4840-941c-a36d55ebd0bc\" (UID: \"71ccfaca-2557-4840-941c-a36d55ebd0bc\") " Dec 04 17:56:40 crc kubenswrapper[4631]: I1204 17:56:40.241652 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71ccfaca-2557-4840-941c-a36d55ebd0bc-kube-api-access-s8vmd" (OuterVolumeSpecName: "kube-api-access-s8vmd") pod "71ccfaca-2557-4840-941c-a36d55ebd0bc" (UID: "71ccfaca-2557-4840-941c-a36d55ebd0bc"). InnerVolumeSpecName "kube-api-access-s8vmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 17:56:40 crc kubenswrapper[4631]: I1204 17:56:40.263348 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71ccfaca-2557-4840-941c-a36d55ebd0bc-inventory" (OuterVolumeSpecName: "inventory") pod "71ccfaca-2557-4840-941c-a36d55ebd0bc" (UID: "71ccfaca-2557-4840-941c-a36d55ebd0bc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:56:40 crc kubenswrapper[4631]: I1204 17:56:40.276528 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71ccfaca-2557-4840-941c-a36d55ebd0bc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "71ccfaca-2557-4840-941c-a36d55ebd0bc" (UID: "71ccfaca-2557-4840-941c-a36d55ebd0bc"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 17:56:40 crc kubenswrapper[4631]: I1204 17:56:40.337584 4631 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71ccfaca-2557-4840-941c-a36d55ebd0bc-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 17:56:40 crc kubenswrapper[4631]: I1204 17:56:40.337736 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s8vmd\" (UniqueName: \"kubernetes.io/projected/71ccfaca-2557-4840-941c-a36d55ebd0bc-kube-api-access-s8vmd\") on node \"crc\" DevicePath \"\"" Dec 04 17:56:40 crc kubenswrapper[4631]: I1204 17:56:40.337827 4631 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71ccfaca-2557-4840-941c-a36d55ebd0bc-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.014245 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5j289" Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.163164 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn"] Dec 04 17:56:41 crc kubenswrapper[4631]: E1204 17:56:41.163613 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71ccfaca-2557-4840-941c-a36d55ebd0bc" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.163630 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="71ccfaca-2557-4840-941c-a36d55ebd0bc" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.163810 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="71ccfaca-2557-4840-941c-a36d55ebd0bc" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.164525 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn" Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.170605 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.170659 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.170750 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-9284p" Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.173468 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.182264 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn"] Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.254273 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn\" (UID: \"9789c9a8-e2ff-4344-a946-81d8a8ef26fe\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn" Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.254331 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn\" (UID: \"9789c9a8-e2ff-4344-a946-81d8a8ef26fe\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn" Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.254485 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7tvj\" (UniqueName: \"kubernetes.io/projected/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-kube-api-access-c7tvj\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn\" (UID: \"9789c9a8-e2ff-4344-a946-81d8a8ef26fe\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn" Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.254663 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn\" (UID: \"9789c9a8-e2ff-4344-a946-81d8a8ef26fe\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn" Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.356238 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn\" (UID: \"9789c9a8-e2ff-4344-a946-81d8a8ef26fe\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn" Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.356288 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-inventory\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn\" (UID: \"9789c9a8-e2ff-4344-a946-81d8a8ef26fe\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn" Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.356385 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7tvj\" (UniqueName: \"kubernetes.io/projected/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-kube-api-access-c7tvj\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn\" (UID: \"9789c9a8-e2ff-4344-a946-81d8a8ef26fe\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn" Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.356440 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn\" (UID: \"9789c9a8-e2ff-4344-a946-81d8a8ef26fe\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn" Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.361466 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn\" (UID: \"9789c9a8-e2ff-4344-a946-81d8a8ef26fe\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn" Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.362207 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn\" (UID: \"9789c9a8-e2ff-4344-a946-81d8a8ef26fe\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn" Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.363433 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn\" (UID: \"9789c9a8-e2ff-4344-a946-81d8a8ef26fe\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn" Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.377060 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7tvj\" (UniqueName: \"kubernetes.io/projected/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-kube-api-access-c7tvj\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn\" (UID: \"9789c9a8-e2ff-4344-a946-81d8a8ef26fe\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn" Dec 04 17:56:41 crc kubenswrapper[4631]: I1204 17:56:41.486783 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn" Dec 04 17:56:42 crc kubenswrapper[4631]: I1204 17:56:42.007476 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn"] Dec 04 17:56:42 crc kubenswrapper[4631]: I1204 17:56:42.024579 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn" event={"ID":"9789c9a8-e2ff-4344-a946-81d8a8ef26fe","Type":"ContainerStarted","Data":"a35dd10923070c78d793d728600aecc56f8aade37c797cc867d90c690b99c113"} Dec 04 17:56:43 crc kubenswrapper[4631]: I1204 17:56:43.041530 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn" event={"ID":"9789c9a8-e2ff-4344-a946-81d8a8ef26fe","Type":"ContainerStarted","Data":"24b8e3a8c7fa2ca197bf8dc3586e2bfe7d0028366d68c465d4d34465ac692145"} Dec 04 17:56:43 crc kubenswrapper[4631]: I1204 17:56:43.078943 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn" podStartSLOduration=1.864294659 podStartE2EDuration="2.07891733s" podCreationTimestamp="2025-12-04 17:56:41 +0000 UTC" firstStartedPulling="2025-12-04 17:56:42.011617528 +0000 UTC m=+1732.043859526" lastFinishedPulling="2025-12-04 17:56:42.226240189 +0000 UTC m=+1732.258482197" observedRunningTime="2025-12-04 17:56:43.060310698 +0000 UTC m=+1733.092552696" watchObservedRunningTime="2025-12-04 17:56:43.07891733 +0000 UTC m=+1733.111159338" Dec 04 17:56:47 crc kubenswrapper[4631]: I1204 17:56:47.239417 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" Dec 04 17:56:47 crc kubenswrapper[4631]: E1204 17:56:47.240220 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 17:57:00 crc kubenswrapper[4631]: I1204 17:57:00.246293 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" Dec 04 17:57:00 crc kubenswrapper[4631]: E1204 17:57:00.247155 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 17:57:14 crc kubenswrapper[4631]: I1204 17:57:14.239920 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" Dec 04 17:57:14 crc kubenswrapper[4631]: E1204 17:57:14.240600 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 17:57:25 crc kubenswrapper[4631]: I1204 17:57:25.239622 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" Dec 04 17:57:25 crc kubenswrapper[4631]: E1204 17:57:25.240500 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 17:57:37 crc kubenswrapper[4631]: I1204 17:57:37.239956 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" Dec 04 17:57:37 crc kubenswrapper[4631]: E1204 17:57:37.240889 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 17:57:51 crc kubenswrapper[4631]: I1204 17:57:51.239974 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" Dec 04 17:57:51 crc kubenswrapper[4631]: E1204 17:57:51.241718 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 17:58:02 crc kubenswrapper[4631]: I1204 17:58:02.242054 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" Dec 04 17:58:02 crc kubenswrapper[4631]: E1204 17:58:02.242776 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 17:58:14 crc kubenswrapper[4631]: I1204 17:58:14.240292 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" Dec 04 17:58:14 crc kubenswrapper[4631]: E1204 17:58:14.241011 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 17:58:29 crc kubenswrapper[4631]: I1204 17:58:29.239306 4631 
scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" Dec 04 17:58:29 crc kubenswrapper[4631]: E1204 17:58:29.240230 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 17:58:42 crc kubenswrapper[4631]: I1204 17:58:42.239225 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" Dec 04 17:58:43 crc kubenswrapper[4631]: I1204 17:58:43.125795 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerStarted","Data":"93cd7a21a80bbde50526477519b7b8631d88aed11055adc56d8ec73b1d4639e3"} Dec 04 17:59:07 crc kubenswrapper[4631]: I1204 17:59:07.143650 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-j97nj"] Dec 04 17:59:07 crc kubenswrapper[4631]: I1204 17:59:07.196430 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-4214-account-create-update-wph5g"] Dec 04 17:59:07 crc kubenswrapper[4631]: I1204 17:59:07.212850 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-d6rbm"] Dec 04 17:59:07 crc kubenswrapper[4631]: I1204 17:59:07.221853 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-4214-account-create-update-wph5g"] Dec 04 17:59:07 crc kubenswrapper[4631]: I1204 17:59:07.231455 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-j97nj"] Dec 04 17:59:07 crc kubenswrapper[4631]: I1204 17:59:07.240220 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-d6rbm"] Dec 04 17:59:08 crc kubenswrapper[4631]: I1204 17:59:08.038697 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-15b3-account-create-update-cxkhf"] Dec 04 17:59:08 crc kubenswrapper[4631]: I1204 17:59:08.051675 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-15b3-account-create-update-cxkhf"] Dec 04 17:59:08 crc kubenswrapper[4631]: I1204 17:59:08.061009 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-lblx5"] Dec 04 17:59:08 crc kubenswrapper[4631]: I1204 17:59:08.071513 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-9167-account-create-update-gt79q"] Dec 04 17:59:08 crc kubenswrapper[4631]: I1204 17:59:08.078734 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-lblx5"] Dec 04 17:59:08 crc kubenswrapper[4631]: I1204 17:59:08.088694 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-9167-account-create-update-gt79q"] Dec 04 17:59:08 crc kubenswrapper[4631]: I1204 17:59:08.255099 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02956ee6-42ea-4056-ba19-1a79683632b9" path="/var/lib/kubelet/pods/02956ee6-42ea-4056-ba19-1a79683632b9/volumes" Dec 04 17:59:08 crc kubenswrapper[4631]: I1204 17:59:08.256554 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="45fe96ac-432d-4912-a365-ad375be740f0" path="/var/lib/kubelet/pods/45fe96ac-432d-4912-a365-ad375be740f0/volumes" Dec 04 17:59:08 crc kubenswrapper[4631]: I1204 17:59:08.258028 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a142a3c-8ece-4f7e-a43d-778c9ad25a32" path="/var/lib/kubelet/pods/5a142a3c-8ece-4f7e-a43d-778c9ad25a32/volumes" Dec 04 17:59:08 crc kubenswrapper[4631]: I1204 17:59:08.259405 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b60da41-b099-42cf-a044-d268327eb8e7" path="/var/lib/kubelet/pods/6b60da41-b099-42cf-a044-d268327eb8e7/volumes" Dec 04 17:59:08 crc kubenswrapper[4631]: I1204 17:59:08.263806 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d07b4988-dc6b-4414-abf4-24abf610ecbf" path="/var/lib/kubelet/pods/d07b4988-dc6b-4414-abf4-24abf610ecbf/volumes" Dec 04 17:59:08 crc kubenswrapper[4631]: I1204 17:59:08.266961 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fab6d82a-4302-4582-ab50-411aae70084a" path="/var/lib/kubelet/pods/fab6d82a-4302-4582-ab50-411aae70084a/volumes" Dec 04 17:59:20 crc kubenswrapper[4631]: I1204 17:59:20.658507 4631 scope.go:117] "RemoveContainer" containerID="5080b7d0504ecf6365bf7989bf1ee5734b38c41069c6ac1cf3904ac2236740c3" Dec 04 17:59:20 crc kubenswrapper[4631]: I1204 17:59:20.693677 4631 scope.go:117] "RemoveContainer" containerID="2320af929f7b2276aba2af835bfede5952c85cfc885580b16654f33f88ea3801" Dec 04 17:59:20 crc kubenswrapper[4631]: I1204 17:59:20.742823 4631 scope.go:117] "RemoveContainer" containerID="3c0f7870a5a221c78a9d2e146e9e1e0f59b7d10646ee685454bc38f108b16bfe" Dec 04 17:59:20 crc kubenswrapper[4631]: I1204 17:59:20.782614 4631 scope.go:117] "RemoveContainer" containerID="31ed7fdf8d2bf4de358e7fba4701f474e7354295f9984e24e38fefff4697d515" Dec 04 17:59:20 crc kubenswrapper[4631]: I1204 17:59:20.829753 4631 scope.go:117] "RemoveContainer" containerID="b79e1d04d4364ea017e8e769b932deb0f5cfff74110bfaf66547a964f9f65a07" Dec 04 17:59:20 crc kubenswrapper[4631]: I1204 17:59:20.866675 4631 scope.go:117] "RemoveContainer" containerID="c4a585bb7c54ce6df32e5648229e5a1335a7a6a4655b04b16a83faeec1f5fe4b" Dec 04 17:59:37 crc kubenswrapper[4631]: I1204 17:59:37.049179 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-gg9j7"] Dec 04 17:59:37 crc kubenswrapper[4631]: I1204 17:59:37.060000 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-3f0f-account-create-update-8ptr9"] Dec 04 17:59:37 crc kubenswrapper[4631]: I1204 17:59:37.067350 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-927b-account-create-update-7jqb5"] Dec 04 17:59:37 crc kubenswrapper[4631]: I1204 17:59:37.081345 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-x45nc"] Dec 04 17:59:37 crc kubenswrapper[4631]: I1204 17:59:37.093947 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-6029-account-create-update-hx8lr"] Dec 04 17:59:37 crc kubenswrapper[4631]: I1204 17:59:37.104488 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-3f0f-account-create-update-8ptr9"] Dec 04 17:59:37 crc kubenswrapper[4631]: I1204 17:59:37.113817 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-gg9j7"] Dec 04 17:59:37 crc kubenswrapper[4631]: I1204 17:59:37.122848 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-wgt4k"] Dec 04 17:59:37 crc 
kubenswrapper[4631]: I1204 17:59:37.135265 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-6029-account-create-update-hx8lr"] Dec 04 17:59:37 crc kubenswrapper[4631]: I1204 17:59:37.155277 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-x45nc"] Dec 04 17:59:37 crc kubenswrapper[4631]: I1204 17:59:37.193222 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-wgt4k"] Dec 04 17:59:37 crc kubenswrapper[4631]: I1204 17:59:37.203300 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-927b-account-create-update-7jqb5"] Dec 04 17:59:38 crc kubenswrapper[4631]: I1204 17:59:38.254785 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d740232-221b-4667-bb8e-995d626b74ce" path="/var/lib/kubelet/pods/2d740232-221b-4667-bb8e-995d626b74ce/volumes" Dec 04 17:59:38 crc kubenswrapper[4631]: I1204 17:59:38.256311 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ea88fc7-43b7-4b93-b9f0-e6833868a1cf" path="/var/lib/kubelet/pods/2ea88fc7-43b7-4b93-b9f0-e6833868a1cf/volumes" Dec 04 17:59:38 crc kubenswrapper[4631]: I1204 17:59:38.262470 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cd3284d-eef1-46b8-a5eb-88c0a1772c61" path="/var/lib/kubelet/pods/3cd3284d-eef1-46b8-a5eb-88c0a1772c61/volumes" Dec 04 17:59:38 crc kubenswrapper[4631]: I1204 17:59:38.264138 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f09451c-0497-468a-a555-df590ba4e739" path="/var/lib/kubelet/pods/4f09451c-0497-468a-a555-df590ba4e739/volumes" Dec 04 17:59:38 crc kubenswrapper[4631]: I1204 17:59:38.264953 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b827db3-b1ee-4b2d-8e57-807fc9449549" path="/var/lib/kubelet/pods/9b827db3-b1ee-4b2d-8e57-807fc9449549/volumes" Dec 04 17:59:38 crc kubenswrapper[4631]: I1204 17:59:38.266592 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d06df4fc-ddce-4c76-b612-6e94abc94c9d" path="/var/lib/kubelet/pods/d06df4fc-ddce-4c76-b612-6e94abc94c9d/volumes" Dec 04 17:59:42 crc kubenswrapper[4631]: I1204 17:59:42.031745 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-qgrwj"] Dec 04 17:59:42 crc kubenswrapper[4631]: I1204 17:59:42.071436 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-qgrwj"] Dec 04 17:59:42 crc kubenswrapper[4631]: I1204 17:59:42.248934 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f5756a6-d59c-4653-ac3a-8ba1fd91862b" path="/var/lib/kubelet/pods/8f5756a6-d59c-4653-ac3a-8ba1fd91862b/volumes" Dec 04 17:59:53 crc kubenswrapper[4631]: I1204 17:59:53.211626 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-q95bk"] Dec 04 17:59:53 crc kubenswrapper[4631]: I1204 17:59:53.214200 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-q95bk" Dec 04 17:59:53 crc kubenswrapper[4631]: I1204 17:59:53.244974 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q95bk"] Dec 04 17:59:53 crc kubenswrapper[4631]: I1204 17:59:53.321580 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4v8c\" (UniqueName: \"kubernetes.io/projected/0f3f50d4-2cbd-4b36-b6b4-00e312badb0f-kube-api-access-j4v8c\") pod \"certified-operators-q95bk\" (UID: \"0f3f50d4-2cbd-4b36-b6b4-00e312badb0f\") " pod="openshift-marketplace/certified-operators-q95bk" Dec 04 17:59:53 crc kubenswrapper[4631]: I1204 17:59:53.322053 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f3f50d4-2cbd-4b36-b6b4-00e312badb0f-catalog-content\") pod \"certified-operators-q95bk\" (UID: \"0f3f50d4-2cbd-4b36-b6b4-00e312badb0f\") " pod="openshift-marketplace/certified-operators-q95bk" Dec 04 17:59:53 crc kubenswrapper[4631]: I1204 17:59:53.322246 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f3f50d4-2cbd-4b36-b6b4-00e312badb0f-utilities\") pod \"certified-operators-q95bk\" (UID: \"0f3f50d4-2cbd-4b36-b6b4-00e312badb0f\") " pod="openshift-marketplace/certified-operators-q95bk" Dec 04 17:59:53 crc kubenswrapper[4631]: I1204 17:59:53.424294 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4v8c\" (UniqueName: \"kubernetes.io/projected/0f3f50d4-2cbd-4b36-b6b4-00e312badb0f-kube-api-access-j4v8c\") pod \"certified-operators-q95bk\" (UID: \"0f3f50d4-2cbd-4b36-b6b4-00e312badb0f\") " pod="openshift-marketplace/certified-operators-q95bk" Dec 04 17:59:53 crc kubenswrapper[4631]: I1204 17:59:53.424706 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f3f50d4-2cbd-4b36-b6b4-00e312badb0f-catalog-content\") pod \"certified-operators-q95bk\" (UID: \"0f3f50d4-2cbd-4b36-b6b4-00e312badb0f\") " pod="openshift-marketplace/certified-operators-q95bk" Dec 04 17:59:53 crc kubenswrapper[4631]: I1204 17:59:53.424943 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f3f50d4-2cbd-4b36-b6b4-00e312badb0f-utilities\") pod \"certified-operators-q95bk\" (UID: \"0f3f50d4-2cbd-4b36-b6b4-00e312badb0f\") " pod="openshift-marketplace/certified-operators-q95bk" Dec 04 17:59:53 crc kubenswrapper[4631]: I1204 17:59:53.425128 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f3f50d4-2cbd-4b36-b6b4-00e312badb0f-catalog-content\") pod \"certified-operators-q95bk\" (UID: \"0f3f50d4-2cbd-4b36-b6b4-00e312badb0f\") " pod="openshift-marketplace/certified-operators-q95bk" Dec 04 17:59:53 crc kubenswrapper[4631]: I1204 17:59:53.425418 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f3f50d4-2cbd-4b36-b6b4-00e312badb0f-utilities\") pod \"certified-operators-q95bk\" (UID: \"0f3f50d4-2cbd-4b36-b6b4-00e312badb0f\") " pod="openshift-marketplace/certified-operators-q95bk" Dec 04 17:59:53 crc kubenswrapper[4631]: I1204 17:59:53.443955 4631 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-j4v8c\" (UniqueName: \"kubernetes.io/projected/0f3f50d4-2cbd-4b36-b6b4-00e312badb0f-kube-api-access-j4v8c\") pod \"certified-operators-q95bk\" (UID: \"0f3f50d4-2cbd-4b36-b6b4-00e312badb0f\") " pod="openshift-marketplace/certified-operators-q95bk" Dec 04 17:59:53 crc kubenswrapper[4631]: I1204 17:59:53.553678 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q95bk" Dec 04 17:59:54 crc kubenswrapper[4631]: W1204 17:59:54.017508 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0f3f50d4_2cbd_4b36_b6b4_00e312badb0f.slice/crio-7ddd6637e8e9ab3509a6c4960b38b4c7c3aa5e479234915a2210d1f1a7d9600f WatchSource:0}: Error finding container 7ddd6637e8e9ab3509a6c4960b38b4c7c3aa5e479234915a2210d1f1a7d9600f: Status 404 returned error can't find the container with id 7ddd6637e8e9ab3509a6c4960b38b4c7c3aa5e479234915a2210d1f1a7d9600f Dec 04 17:59:54 crc kubenswrapper[4631]: I1204 17:59:54.017925 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q95bk"] Dec 04 17:59:54 crc kubenswrapper[4631]: I1204 17:59:54.789673 4631 generic.go:334] "Generic (PLEG): container finished" podID="0f3f50d4-2cbd-4b36-b6b4-00e312badb0f" containerID="d865191746326baf2ae83e1a6fb20c384e057bc0c507ca3402ecef81e5d70d51" exitCode=0 Dec 04 17:59:54 crc kubenswrapper[4631]: I1204 17:59:54.789937 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q95bk" event={"ID":"0f3f50d4-2cbd-4b36-b6b4-00e312badb0f","Type":"ContainerDied","Data":"d865191746326baf2ae83e1a6fb20c384e057bc0c507ca3402ecef81e5d70d51"} Dec 04 17:59:54 crc kubenswrapper[4631]: I1204 17:59:54.789966 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q95bk" event={"ID":"0f3f50d4-2cbd-4b36-b6b4-00e312badb0f","Type":"ContainerStarted","Data":"7ddd6637e8e9ab3509a6c4960b38b4c7c3aa5e479234915a2210d1f1a7d9600f"} Dec 04 17:59:54 crc kubenswrapper[4631]: I1204 17:59:54.792132 4631 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 18:00:00 crc kubenswrapper[4631]: I1204 18:00:00.148798 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r"] Dec 04 18:00:00 crc kubenswrapper[4631]: I1204 18:00:00.151041 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r" Dec 04 18:00:00 crc kubenswrapper[4631]: I1204 18:00:00.153195 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 04 18:00:00 crc kubenswrapper[4631]: I1204 18:00:00.153550 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 04 18:00:00 crc kubenswrapper[4631]: I1204 18:00:00.173611 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c570a48-b033-455f-8a52-6169134b24ec-config-volume\") pod \"collect-profiles-29414520-d8m4r\" (UID: \"9c570a48-b033-455f-8a52-6169134b24ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r" Dec 04 18:00:00 crc kubenswrapper[4631]: I1204 18:00:00.173661 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c570a48-b033-455f-8a52-6169134b24ec-secret-volume\") pod \"collect-profiles-29414520-d8m4r\" (UID: \"9c570a48-b033-455f-8a52-6169134b24ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r" Dec 04 18:00:00 crc kubenswrapper[4631]: I1204 18:00:00.173747 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cck7f\" (UniqueName: \"kubernetes.io/projected/9c570a48-b033-455f-8a52-6169134b24ec-kube-api-access-cck7f\") pod \"collect-profiles-29414520-d8m4r\" (UID: \"9c570a48-b033-455f-8a52-6169134b24ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r" Dec 04 18:00:00 crc kubenswrapper[4631]: I1204 18:00:00.173950 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r"] Dec 04 18:00:00 crc kubenswrapper[4631]: I1204 18:00:00.275059 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cck7f\" (UniqueName: \"kubernetes.io/projected/9c570a48-b033-455f-8a52-6169134b24ec-kube-api-access-cck7f\") pod \"collect-profiles-29414520-d8m4r\" (UID: \"9c570a48-b033-455f-8a52-6169134b24ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r" Dec 04 18:00:00 crc kubenswrapper[4631]: I1204 18:00:00.275465 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c570a48-b033-455f-8a52-6169134b24ec-config-volume\") pod \"collect-profiles-29414520-d8m4r\" (UID: \"9c570a48-b033-455f-8a52-6169134b24ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r" Dec 04 18:00:00 crc kubenswrapper[4631]: I1204 18:00:00.276535 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c570a48-b033-455f-8a52-6169134b24ec-secret-volume\") pod \"collect-profiles-29414520-d8m4r\" (UID: \"9c570a48-b033-455f-8a52-6169134b24ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r" Dec 04 18:00:00 crc kubenswrapper[4631]: I1204 18:00:00.276428 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c570a48-b033-455f-8a52-6169134b24ec-config-volume\") pod 
\"collect-profiles-29414520-d8m4r\" (UID: \"9c570a48-b033-455f-8a52-6169134b24ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r" Dec 04 18:00:00 crc kubenswrapper[4631]: I1204 18:00:00.291624 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c570a48-b033-455f-8a52-6169134b24ec-secret-volume\") pod \"collect-profiles-29414520-d8m4r\" (UID: \"9c570a48-b033-455f-8a52-6169134b24ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r" Dec 04 18:00:00 crc kubenswrapper[4631]: I1204 18:00:00.292328 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cck7f\" (UniqueName: \"kubernetes.io/projected/9c570a48-b033-455f-8a52-6169134b24ec-kube-api-access-cck7f\") pod \"collect-profiles-29414520-d8m4r\" (UID: \"9c570a48-b033-455f-8a52-6169134b24ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r" Dec 04 18:00:00 crc kubenswrapper[4631]: I1204 18:00:00.478303 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r" Dec 04 18:00:01 crc kubenswrapper[4631]: I1204 18:00:01.145998 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r"] Dec 04 18:00:01 crc kubenswrapper[4631]: W1204 18:00:01.149889 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9c570a48_b033_455f_8a52_6169134b24ec.slice/crio-b45e945240103f5dabf1f1ca8f62355d3eafdafa25bd7a2d0c780fa7119d26ed WatchSource:0}: Error finding container b45e945240103f5dabf1f1ca8f62355d3eafdafa25bd7a2d0c780fa7119d26ed: Status 404 returned error can't find the container with id b45e945240103f5dabf1f1ca8f62355d3eafdafa25bd7a2d0c780fa7119d26ed Dec 04 18:00:01 crc kubenswrapper[4631]: I1204 18:00:01.874619 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q95bk" event={"ID":"0f3f50d4-2cbd-4b36-b6b4-00e312badb0f","Type":"ContainerStarted","Data":"09c00272e91fe577a4291302ed2699f82eaa5e0f5aea95a43aefadbea437e69c"} Dec 04 18:00:01 crc kubenswrapper[4631]: I1204 18:00:01.875800 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r" event={"ID":"9c570a48-b033-455f-8a52-6169134b24ec","Type":"ContainerStarted","Data":"920958e77b829db9ca620eaec30971eb113fab1daba2ea489208546434e9068c"} Dec 04 18:00:01 crc kubenswrapper[4631]: I1204 18:00:01.875850 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r" event={"ID":"9c570a48-b033-455f-8a52-6169134b24ec","Type":"ContainerStarted","Data":"b45e945240103f5dabf1f1ca8f62355d3eafdafa25bd7a2d0c780fa7119d26ed"} Dec 04 18:00:01 crc kubenswrapper[4631]: I1204 18:00:01.933623 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r" podStartSLOduration=1.933604256 podStartE2EDuration="1.933604256s" podCreationTimestamp="2025-12-04 18:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 18:00:01.932712611 +0000 UTC m=+1931.964954609" watchObservedRunningTime="2025-12-04 18:00:01.933604256 +0000 UTC 
m=+1931.965846264" Dec 04 18:00:02 crc kubenswrapper[4631]: I1204 18:00:02.886256 4631 generic.go:334] "Generic (PLEG): container finished" podID="0f3f50d4-2cbd-4b36-b6b4-00e312badb0f" containerID="09c00272e91fe577a4291302ed2699f82eaa5e0f5aea95a43aefadbea437e69c" exitCode=0 Dec 04 18:00:02 crc kubenswrapper[4631]: I1204 18:00:02.886332 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q95bk" event={"ID":"0f3f50d4-2cbd-4b36-b6b4-00e312badb0f","Type":"ContainerDied","Data":"09c00272e91fe577a4291302ed2699f82eaa5e0f5aea95a43aefadbea437e69c"} Dec 04 18:00:02 crc kubenswrapper[4631]: I1204 18:00:02.889655 4631 generic.go:334] "Generic (PLEG): container finished" podID="9c570a48-b033-455f-8a52-6169134b24ec" containerID="920958e77b829db9ca620eaec30971eb113fab1daba2ea489208546434e9068c" exitCode=0 Dec 04 18:00:02 crc kubenswrapper[4631]: I1204 18:00:02.889675 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r" event={"ID":"9c570a48-b033-455f-8a52-6169134b24ec","Type":"ContainerDied","Data":"920958e77b829db9ca620eaec30971eb113fab1daba2ea489208546434e9068c"} Dec 04 18:00:03 crc kubenswrapper[4631]: I1204 18:00:03.903541 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q95bk" event={"ID":"0f3f50d4-2cbd-4b36-b6b4-00e312badb0f","Type":"ContainerStarted","Data":"772b862120ba7a4d831f3f9e65c7226051aa5847714389f145eef80a487064c1"} Dec 04 18:00:03 crc kubenswrapper[4631]: I1204 18:00:03.941118 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-q95bk" podStartSLOduration=2.201452792 podStartE2EDuration="10.941098835s" podCreationTimestamp="2025-12-04 17:59:53 +0000 UTC" firstStartedPulling="2025-12-04 17:59:54.791738621 +0000 UTC m=+1924.823980619" lastFinishedPulling="2025-12-04 18:00:03.531384674 +0000 UTC m=+1933.563626662" observedRunningTime="2025-12-04 18:00:03.93551607 +0000 UTC m=+1933.967758068" watchObservedRunningTime="2025-12-04 18:00:03.941098835 +0000 UTC m=+1933.973340833" Dec 04 18:00:04 crc kubenswrapper[4631]: I1204 18:00:04.284444 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r" Dec 04 18:00:04 crc kubenswrapper[4631]: I1204 18:00:04.448926 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c570a48-b033-455f-8a52-6169134b24ec-config-volume\") pod \"9c570a48-b033-455f-8a52-6169134b24ec\" (UID: \"9c570a48-b033-455f-8a52-6169134b24ec\") " Dec 04 18:00:04 crc kubenswrapper[4631]: I1204 18:00:04.449005 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c570a48-b033-455f-8a52-6169134b24ec-secret-volume\") pod \"9c570a48-b033-455f-8a52-6169134b24ec\" (UID: \"9c570a48-b033-455f-8a52-6169134b24ec\") " Dec 04 18:00:04 crc kubenswrapper[4631]: I1204 18:00:04.449239 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cck7f\" (UniqueName: \"kubernetes.io/projected/9c570a48-b033-455f-8a52-6169134b24ec-kube-api-access-cck7f\") pod \"9c570a48-b033-455f-8a52-6169134b24ec\" (UID: \"9c570a48-b033-455f-8a52-6169134b24ec\") " Dec 04 18:00:04 crc kubenswrapper[4631]: I1204 18:00:04.449783 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c570a48-b033-455f-8a52-6169134b24ec-config-volume" (OuterVolumeSpecName: "config-volume") pod "9c570a48-b033-455f-8a52-6169134b24ec" (UID: "9c570a48-b033-455f-8a52-6169134b24ec"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 18:00:04 crc kubenswrapper[4631]: I1204 18:00:04.456632 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c570a48-b033-455f-8a52-6169134b24ec-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9c570a48-b033-455f-8a52-6169134b24ec" (UID: "9c570a48-b033-455f-8a52-6169134b24ec"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:00:04 crc kubenswrapper[4631]: I1204 18:00:04.458491 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c570a48-b033-455f-8a52-6169134b24ec-kube-api-access-cck7f" (OuterVolumeSpecName: "kube-api-access-cck7f") pod "9c570a48-b033-455f-8a52-6169134b24ec" (UID: "9c570a48-b033-455f-8a52-6169134b24ec"). InnerVolumeSpecName "kube-api-access-cck7f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:00:04 crc kubenswrapper[4631]: I1204 18:00:04.551958 4631 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c570a48-b033-455f-8a52-6169134b24ec-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 04 18:00:04 crc kubenswrapper[4631]: I1204 18:00:04.552192 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cck7f\" (UniqueName: \"kubernetes.io/projected/9c570a48-b033-455f-8a52-6169134b24ec-kube-api-access-cck7f\") on node \"crc\" DevicePath \"\"" Dec 04 18:00:04 crc kubenswrapper[4631]: I1204 18:00:04.552279 4631 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c570a48-b033-455f-8a52-6169134b24ec-config-volume\") on node \"crc\" DevicePath \"\"" Dec 04 18:00:04 crc kubenswrapper[4631]: I1204 18:00:04.910307 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r" event={"ID":"9c570a48-b033-455f-8a52-6169134b24ec","Type":"ContainerDied","Data":"b45e945240103f5dabf1f1ca8f62355d3eafdafa25bd7a2d0c780fa7119d26ed"} Dec 04 18:00:04 crc kubenswrapper[4631]: I1204 18:00:04.911005 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b45e945240103f5dabf1f1ca8f62355d3eafdafa25bd7a2d0c780fa7119d26ed" Dec 04 18:00:04 crc kubenswrapper[4631]: I1204 18:00:04.910327 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r" Dec 04 18:00:12 crc kubenswrapper[4631]: I1204 18:00:12.974316 4631 generic.go:334] "Generic (PLEG): container finished" podID="9789c9a8-e2ff-4344-a946-81d8a8ef26fe" containerID="24b8e3a8c7fa2ca197bf8dc3586e2bfe7d0028366d68c465d4d34465ac692145" exitCode=0 Dec 04 18:00:12 crc kubenswrapper[4631]: I1204 18:00:12.974397 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn" event={"ID":"9789c9a8-e2ff-4344-a946-81d8a8ef26fe","Type":"ContainerDied","Data":"24b8e3a8c7fa2ca197bf8dc3586e2bfe7d0028366d68c465d4d34465ac692145"} Dec 04 18:00:13 crc kubenswrapper[4631]: I1204 18:00:13.553894 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-q95bk" Dec 04 18:00:13 crc kubenswrapper[4631]: I1204 18:00:13.554266 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-q95bk" Dec 04 18:00:13 crc kubenswrapper[4631]: I1204 18:00:13.613087 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-q95bk" Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.040883 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-q95bk" Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.136479 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q95bk"] Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.180713 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-n859d"] Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.181679 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-n859d" 
podUID="4d32387e-c5ff-4cf8-8383-38bf64325277" containerName="registry-server" containerID="cri-o://a998fd61eca91ce3044bfd104f6a00ec156f5dedc1b2d20fb0590243b5f90f3e" gracePeriod=2 Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.490891 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn" Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.650245 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-inventory\") pod \"9789c9a8-e2ff-4344-a946-81d8a8ef26fe\" (UID: \"9789c9a8-e2ff-4344-a946-81d8a8ef26fe\") " Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.650378 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-ssh-key\") pod \"9789c9a8-e2ff-4344-a946-81d8a8ef26fe\" (UID: \"9789c9a8-e2ff-4344-a946-81d8a8ef26fe\") " Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.650526 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c7tvj\" (UniqueName: \"kubernetes.io/projected/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-kube-api-access-c7tvj\") pod \"9789c9a8-e2ff-4344-a946-81d8a8ef26fe\" (UID: \"9789c9a8-e2ff-4344-a946-81d8a8ef26fe\") " Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.650589 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-bootstrap-combined-ca-bundle\") pod \"9789c9a8-e2ff-4344-a946-81d8a8ef26fe\" (UID: \"9789c9a8-e2ff-4344-a946-81d8a8ef26fe\") " Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.665563 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "9789c9a8-e2ff-4344-a946-81d8a8ef26fe" (UID: "9789c9a8-e2ff-4344-a946-81d8a8ef26fe"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.671668 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-kube-api-access-c7tvj" (OuterVolumeSpecName: "kube-api-access-c7tvj") pod "9789c9a8-e2ff-4344-a946-81d8a8ef26fe" (UID: "9789c9a8-e2ff-4344-a946-81d8a8ef26fe"). InnerVolumeSpecName "kube-api-access-c7tvj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.683659 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9789c9a8-e2ff-4344-a946-81d8a8ef26fe" (UID: "9789c9a8-e2ff-4344-a946-81d8a8ef26fe"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.702079 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-inventory" (OuterVolumeSpecName: "inventory") pod "9789c9a8-e2ff-4344-a946-81d8a8ef26fe" (UID: "9789c9a8-e2ff-4344-a946-81d8a8ef26fe"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.722713 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-n859d" Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.752422 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c7tvj\" (UniqueName: \"kubernetes.io/projected/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-kube-api-access-c7tvj\") on node \"crc\" DevicePath \"\"" Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.752451 4631 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.752462 4631 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.752471 4631 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9789c9a8-e2ff-4344-a946-81d8a8ef26fe-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.854336 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d32387e-c5ff-4cf8-8383-38bf64325277-catalog-content\") pod \"4d32387e-c5ff-4cf8-8383-38bf64325277\" (UID: \"4d32387e-c5ff-4cf8-8383-38bf64325277\") " Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.854557 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d32387e-c5ff-4cf8-8383-38bf64325277-utilities\") pod \"4d32387e-c5ff-4cf8-8383-38bf64325277\" (UID: \"4d32387e-c5ff-4cf8-8383-38bf64325277\") " Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.854628 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djv76\" (UniqueName: \"kubernetes.io/projected/4d32387e-c5ff-4cf8-8383-38bf64325277-kube-api-access-djv76\") pod \"4d32387e-c5ff-4cf8-8383-38bf64325277\" (UID: \"4d32387e-c5ff-4cf8-8383-38bf64325277\") " Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.856000 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d32387e-c5ff-4cf8-8383-38bf64325277-utilities" (OuterVolumeSpecName: "utilities") pod "4d32387e-c5ff-4cf8-8383-38bf64325277" (UID: "4d32387e-c5ff-4cf8-8383-38bf64325277"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.859750 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d32387e-c5ff-4cf8-8383-38bf64325277-kube-api-access-djv76" (OuterVolumeSpecName: "kube-api-access-djv76") pod "4d32387e-c5ff-4cf8-8383-38bf64325277" (UID: "4d32387e-c5ff-4cf8-8383-38bf64325277"). InnerVolumeSpecName "kube-api-access-djv76". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.914906 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d32387e-c5ff-4cf8-8383-38bf64325277-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4d32387e-c5ff-4cf8-8383-38bf64325277" (UID: "4d32387e-c5ff-4cf8-8383-38bf64325277"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.957033 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d32387e-c5ff-4cf8-8383-38bf64325277-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.957078 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d32387e-c5ff-4cf8-8383-38bf64325277-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.957092 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djv76\" (UniqueName: \"kubernetes.io/projected/4d32387e-c5ff-4cf8-8383-38bf64325277-kube-api-access-djv76\") on node \"crc\" DevicePath \"\"" Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.996989 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn" Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.996963 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn" event={"ID":"9789c9a8-e2ff-4344-a946-81d8a8ef26fe","Type":"ContainerDied","Data":"a35dd10923070c78d793d728600aecc56f8aade37c797cc867d90c690b99c113"} Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.997113 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a35dd10923070c78d793d728600aecc56f8aade37c797cc867d90c690b99c113" Dec 04 18:00:14 crc kubenswrapper[4631]: I1204 18:00:14.999867 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n859d" event={"ID":"4d32387e-c5ff-4cf8-8383-38bf64325277","Type":"ContainerDied","Data":"a998fd61eca91ce3044bfd104f6a00ec156f5dedc1b2d20fb0590243b5f90f3e"} Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:14.999917 4631 scope.go:117] "RemoveContainer" containerID="a998fd61eca91ce3044bfd104f6a00ec156f5dedc1b2d20fb0590243b5f90f3e" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:14.999934 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-n859d" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.000050 4631 generic.go:334] "Generic (PLEG): container finished" podID="4d32387e-c5ff-4cf8-8383-38bf64325277" containerID="a998fd61eca91ce3044bfd104f6a00ec156f5dedc1b2d20fb0590243b5f90f3e" exitCode=0 Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.000086 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n859d" event={"ID":"4d32387e-c5ff-4cf8-8383-38bf64325277","Type":"ContainerDied","Data":"50f1a1c2a84c35b67680383885e64c4945f5d72549b248973888c907ba30d3ae"} Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.031477 4631 scope.go:117] "RemoveContainer" containerID="757efba221f6fff72c228d3085528b237a8666ee3e7f8f4c89840203f99cf1b8" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.062669 4631 scope.go:117] "RemoveContainer" containerID="2b92f0a41aaf28ffc8a02ceb2b9ef573278478553f36300f5fa5ac40007b6f80" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.066605 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-n859d"] Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.077676 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-n859d"] Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.101016 4631 scope.go:117] "RemoveContainer" containerID="a998fd61eca91ce3044bfd104f6a00ec156f5dedc1b2d20fb0590243b5f90f3e" Dec 04 18:00:15 crc kubenswrapper[4631]: E1204 18:00:15.104553 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a998fd61eca91ce3044bfd104f6a00ec156f5dedc1b2d20fb0590243b5f90f3e\": container with ID starting with a998fd61eca91ce3044bfd104f6a00ec156f5dedc1b2d20fb0590243b5f90f3e not found: ID does not exist" containerID="a998fd61eca91ce3044bfd104f6a00ec156f5dedc1b2d20fb0590243b5f90f3e" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.104592 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a998fd61eca91ce3044bfd104f6a00ec156f5dedc1b2d20fb0590243b5f90f3e"} err="failed to get container status \"a998fd61eca91ce3044bfd104f6a00ec156f5dedc1b2d20fb0590243b5f90f3e\": rpc error: code = NotFound desc = could not find container \"a998fd61eca91ce3044bfd104f6a00ec156f5dedc1b2d20fb0590243b5f90f3e\": container with ID starting with a998fd61eca91ce3044bfd104f6a00ec156f5dedc1b2d20fb0590243b5f90f3e not found: ID does not exist" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.104616 4631 scope.go:117] "RemoveContainer" containerID="757efba221f6fff72c228d3085528b237a8666ee3e7f8f4c89840203f99cf1b8" Dec 04 18:00:15 crc kubenswrapper[4631]: E1204 18:00:15.107808 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"757efba221f6fff72c228d3085528b237a8666ee3e7f8f4c89840203f99cf1b8\": container with ID starting with 757efba221f6fff72c228d3085528b237a8666ee3e7f8f4c89840203f99cf1b8 not found: ID does not exist" containerID="757efba221f6fff72c228d3085528b237a8666ee3e7f8f4c89840203f99cf1b8" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.107871 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"757efba221f6fff72c228d3085528b237a8666ee3e7f8f4c89840203f99cf1b8"} err="failed to get container status 
\"757efba221f6fff72c228d3085528b237a8666ee3e7f8f4c89840203f99cf1b8\": rpc error: code = NotFound desc = could not find container \"757efba221f6fff72c228d3085528b237a8666ee3e7f8f4c89840203f99cf1b8\": container with ID starting with 757efba221f6fff72c228d3085528b237a8666ee3e7f8f4c89840203f99cf1b8 not found: ID does not exist" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.107911 4631 scope.go:117] "RemoveContainer" containerID="2b92f0a41aaf28ffc8a02ceb2b9ef573278478553f36300f5fa5ac40007b6f80" Dec 04 18:00:15 crc kubenswrapper[4631]: E1204 18:00:15.108184 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b92f0a41aaf28ffc8a02ceb2b9ef573278478553f36300f5fa5ac40007b6f80\": container with ID starting with 2b92f0a41aaf28ffc8a02ceb2b9ef573278478553f36300f5fa5ac40007b6f80 not found: ID does not exist" containerID="2b92f0a41aaf28ffc8a02ceb2b9ef573278478553f36300f5fa5ac40007b6f80" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.108239 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b92f0a41aaf28ffc8a02ceb2b9ef573278478553f36300f5fa5ac40007b6f80"} err="failed to get container status \"2b92f0a41aaf28ffc8a02ceb2b9ef573278478553f36300f5fa5ac40007b6f80\": rpc error: code = NotFound desc = could not find container \"2b92f0a41aaf28ffc8a02ceb2b9ef573278478553f36300f5fa5ac40007b6f80\": container with ID starting with 2b92f0a41aaf28ffc8a02ceb2b9ef573278478553f36300f5fa5ac40007b6f80 not found: ID does not exist" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.135030 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq"] Dec 04 18:00:15 crc kubenswrapper[4631]: E1204 18:00:15.135455 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d32387e-c5ff-4cf8-8383-38bf64325277" containerName="extract-content" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.135471 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d32387e-c5ff-4cf8-8383-38bf64325277" containerName="extract-content" Dec 04 18:00:15 crc kubenswrapper[4631]: E1204 18:00:15.135495 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d32387e-c5ff-4cf8-8383-38bf64325277" containerName="registry-server" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.135502 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d32387e-c5ff-4cf8-8383-38bf64325277" containerName="registry-server" Dec 04 18:00:15 crc kubenswrapper[4631]: E1204 18:00:15.135515 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c570a48-b033-455f-8a52-6169134b24ec" containerName="collect-profiles" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.135521 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c570a48-b033-455f-8a52-6169134b24ec" containerName="collect-profiles" Dec 04 18:00:15 crc kubenswrapper[4631]: E1204 18:00:15.135534 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9789c9a8-e2ff-4344-a946-81d8a8ef26fe" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.135541 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="9789c9a8-e2ff-4344-a946-81d8a8ef26fe" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 04 18:00:15 crc kubenswrapper[4631]: E1204 18:00:15.135562 4631 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="4d32387e-c5ff-4cf8-8383-38bf64325277" containerName="extract-utilities" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.135569 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d32387e-c5ff-4cf8-8383-38bf64325277" containerName="extract-utilities" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.135731 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c570a48-b033-455f-8a52-6169134b24ec" containerName="collect-profiles" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.135743 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d32387e-c5ff-4cf8-8383-38bf64325277" containerName="registry-server" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.135762 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="9789c9a8-e2ff-4344-a946-81d8a8ef26fe" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.136416 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.139422 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.139635 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-9284p" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.139670 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.139756 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.149847 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq"] Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.261814 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcbw5\" (UniqueName: \"kubernetes.io/projected/8a60b6a3-2e66-46ad-987f-9c6aac93e03f-kube-api-access-jcbw5\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq\" (UID: \"8a60b6a3-2e66-46ad-987f-9c6aac93e03f\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.261862 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a60b6a3-2e66-46ad-987f-9c6aac93e03f-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq\" (UID: \"8a60b6a3-2e66-46ad-987f-9c6aac93e03f\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.261984 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8a60b6a3-2e66-46ad-987f-9c6aac93e03f-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq\" (UID: \"8a60b6a3-2e66-46ad-987f-9c6aac93e03f\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.363464 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-jcbw5\" (UniqueName: \"kubernetes.io/projected/8a60b6a3-2e66-46ad-987f-9c6aac93e03f-kube-api-access-jcbw5\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq\" (UID: \"8a60b6a3-2e66-46ad-987f-9c6aac93e03f\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.363532 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a60b6a3-2e66-46ad-987f-9c6aac93e03f-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq\" (UID: \"8a60b6a3-2e66-46ad-987f-9c6aac93e03f\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.364002 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8a60b6a3-2e66-46ad-987f-9c6aac93e03f-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq\" (UID: \"8a60b6a3-2e66-46ad-987f-9c6aac93e03f\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.380132 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a60b6a3-2e66-46ad-987f-9c6aac93e03f-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq\" (UID: \"8a60b6a3-2e66-46ad-987f-9c6aac93e03f\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.382660 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8a60b6a3-2e66-46ad-987f-9c6aac93e03f-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq\" (UID: \"8a60b6a3-2e66-46ad-987f-9c6aac93e03f\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.387542 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcbw5\" (UniqueName: \"kubernetes.io/projected/8a60b6a3-2e66-46ad-987f-9c6aac93e03f-kube-api-access-jcbw5\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq\" (UID: \"8a60b6a3-2e66-46ad-987f-9c6aac93e03f\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq" Dec 04 18:00:15 crc kubenswrapper[4631]: I1204 18:00:15.474970 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq" Dec 04 18:00:16 crc kubenswrapper[4631]: I1204 18:00:16.173053 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq"] Dec 04 18:00:16 crc kubenswrapper[4631]: I1204 18:00:16.249795 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d32387e-c5ff-4cf8-8383-38bf64325277" path="/var/lib/kubelet/pods/4d32387e-c5ff-4cf8-8383-38bf64325277/volumes" Dec 04 18:00:17 crc kubenswrapper[4631]: I1204 18:00:17.023073 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq" event={"ID":"8a60b6a3-2e66-46ad-987f-9c6aac93e03f","Type":"ContainerStarted","Data":"e02f558c6601804df0168df41457762f802f423cb726d657c1966b7408f75b09"} Dec 04 18:00:17 crc kubenswrapper[4631]: I1204 18:00:17.023512 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq" event={"ID":"8a60b6a3-2e66-46ad-987f-9c6aac93e03f","Type":"ContainerStarted","Data":"680cd4e5975ac975b4b842cb46a4a4af1dfa3cbfa8bd8809883188a9a2f52368"} Dec 04 18:00:17 crc kubenswrapper[4631]: I1204 18:00:17.048214 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq" podStartSLOduration=1.887919309 podStartE2EDuration="2.048193553s" podCreationTimestamp="2025-12-04 18:00:15 +0000 UTC" firstStartedPulling="2025-12-04 18:00:16.174031517 +0000 UTC m=+1946.206273515" lastFinishedPulling="2025-12-04 18:00:16.334305761 +0000 UTC m=+1946.366547759" observedRunningTime="2025-12-04 18:00:17.042409101 +0000 UTC m=+1947.074651109" watchObservedRunningTime="2025-12-04 18:00:17.048193553 +0000 UTC m=+1947.080435571" Dec 04 18:00:18 crc kubenswrapper[4631]: I1204 18:00:18.047796 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-4hqks"] Dec 04 18:00:18 crc kubenswrapper[4631]: I1204 18:00:18.055998 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-4hqks"] Dec 04 18:00:18 crc kubenswrapper[4631]: I1204 18:00:18.063080 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-5dhm2"] Dec 04 18:00:18 crc kubenswrapper[4631]: I1204 18:00:18.073808 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-5dhm2"] Dec 04 18:00:18 crc kubenswrapper[4631]: I1204 18:00:18.253492 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30425aae-4c9c-445c-8d10-d4e5874fda30" path="/var/lib/kubelet/pods/30425aae-4c9c-445c-8d10-d4e5874fda30/volumes" Dec 04 18:00:18 crc kubenswrapper[4631]: I1204 18:00:18.254901 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3aa97a3-43fb-4478-bd4e-e06494e42efd" path="/var/lib/kubelet/pods/d3aa97a3-43fb-4478-bd4e-e06494e42efd/volumes" Dec 04 18:00:21 crc kubenswrapper[4631]: I1204 18:00:21.024597 4631 scope.go:117] "RemoveContainer" containerID="02556b9e0072dcfb8b14a61e603273eef0d200a7a1b32a0fc699dce2da1dea6d" Dec 04 18:00:21 crc kubenswrapper[4631]: I1204 18:00:21.061143 4631 scope.go:117] "RemoveContainer" containerID="440108ea496c9ab660f6b98ceab053949b035d7b63ea3e20aaa516e2e5ae3c16" Dec 04 18:00:21 crc kubenswrapper[4631]: I1204 18:00:21.104647 4631 scope.go:117] "RemoveContainer" containerID="86b19dcb6971fa3c48d1e270ff2cb133a09b5de1d1c819038d02434ff32618f2" Dec 04 
18:00:21 crc kubenswrapper[4631]: I1204 18:00:21.122479 4631 scope.go:117] "RemoveContainer" containerID="eaec7493df6ab934e4f6263a57c742760acde1b22328c17865cd503d36376c6c" Dec 04 18:00:21 crc kubenswrapper[4631]: I1204 18:00:21.147738 4631 scope.go:117] "RemoveContainer" containerID="3eada3c14394596d6513d90071095938a7b659eff3460ae64a5eda9a1381b44f" Dec 04 18:00:21 crc kubenswrapper[4631]: I1204 18:00:21.195639 4631 scope.go:117] "RemoveContainer" containerID="d1a4125258045a3ddd27d1b6170ec77289ffad8f8e6f1930c230bcf7e9d7a895" Dec 04 18:00:21 crc kubenswrapper[4631]: I1204 18:00:21.231825 4631 scope.go:117] "RemoveContainer" containerID="a9ef860e5552eed3443a89382124feae6c17a86b1237152806641bd2cd1f0107" Dec 04 18:00:21 crc kubenswrapper[4631]: I1204 18:00:21.286438 4631 scope.go:117] "RemoveContainer" containerID="cfb40f7f80875ab75e8dd5de120bdafda5e72c82de6badee61b1f01b64272add" Dec 04 18:00:21 crc kubenswrapper[4631]: I1204 18:00:21.307866 4631 scope.go:117] "RemoveContainer" containerID="5c5a2c32dcf6b8c47d005af1666096a25b065ea8558929a026282cc710b10523" Dec 04 18:00:21 crc kubenswrapper[4631]: I1204 18:00:21.328075 4631 scope.go:117] "RemoveContainer" containerID="c2d36cd9c0d529514bee8629228118b59a0010c45056e8c06e9a820ba6d2fa9e" Dec 04 18:00:21 crc kubenswrapper[4631]: I1204 18:00:21.346763 4631 scope.go:117] "RemoveContainer" containerID="8093fdc24b1052668163f4b7f21ebb473ba522ed88cd98c8f3c711e54ec19ab2" Dec 04 18:00:21 crc kubenswrapper[4631]: I1204 18:00:21.367166 4631 scope.go:117] "RemoveContainer" containerID="a028cbcabf035c32e5c2316924df66f4c7a2ccdc9c8060591ec1fa6e2f769834" Dec 04 18:00:21 crc kubenswrapper[4631]: I1204 18:00:21.387500 4631 scope.go:117] "RemoveContainer" containerID="1e81b276fa391be2298e89d2f2828e7d7929e13f82aacfca1ccae4831bcdf449" Dec 04 18:00:36 crc kubenswrapper[4631]: I1204 18:00:36.048499 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-tf4br"] Dec 04 18:00:36 crc kubenswrapper[4631]: I1204 18:00:36.061041 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-gq8bc"] Dec 04 18:00:36 crc kubenswrapper[4631]: I1204 18:00:36.072490 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-tf4br"] Dec 04 18:00:36 crc kubenswrapper[4631]: I1204 18:00:36.085328 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-gq8bc"] Dec 04 18:00:36 crc kubenswrapper[4631]: I1204 18:00:36.249104 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3171d16d-db61-4d69-b9c7-262da016be91" path="/var/lib/kubelet/pods/3171d16d-db61-4d69-b9c7-262da016be91/volumes" Dec 04 18:00:36 crc kubenswrapper[4631]: I1204 18:00:36.250179 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6763071-ba0b-4ef7-9843-9a4c66fe4a6f" path="/var/lib/kubelet/pods/c6763071-ba0b-4ef7-9843-9a4c66fe4a6f/volumes" Dec 04 18:00:54 crc kubenswrapper[4631]: I1204 18:00:54.032717 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-jbcc2"] Dec 04 18:00:54 crc kubenswrapper[4631]: I1204 18:00:54.040108 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-jbcc2"] Dec 04 18:00:54 crc kubenswrapper[4631]: I1204 18:00:54.250877 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0aff05c-75cd-495a-903e-83b72596bf86" path="/var/lib/kubelet/pods/d0aff05c-75cd-495a-903e-83b72596bf86/volumes" Dec 04 18:00:55 crc kubenswrapper[4631]: I1204 18:00:55.033998 4631 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-49bhb"] Dec 04 18:00:55 crc kubenswrapper[4631]: I1204 18:00:55.045340 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-49bhb"] Dec 04 18:00:56 crc kubenswrapper[4631]: I1204 18:00:56.252260 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="caa9015d-d530-4caa-8a24-2338d69519a3" path="/var/lib/kubelet/pods/caa9015d-d530-4caa-8a24-2338d69519a3/volumes" Dec 04 18:01:00 crc kubenswrapper[4631]: I1204 18:01:00.151587 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29414521-6wj5d"] Dec 04 18:01:00 crc kubenswrapper[4631]: I1204 18:01:00.153275 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29414521-6wj5d" Dec 04 18:01:00 crc kubenswrapper[4631]: I1204 18:01:00.181941 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29414521-6wj5d"] Dec 04 18:01:00 crc kubenswrapper[4631]: I1204 18:01:00.320925 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-fernet-keys\") pod \"keystone-cron-29414521-6wj5d\" (UID: \"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958\") " pod="openstack/keystone-cron-29414521-6wj5d" Dec 04 18:01:00 crc kubenswrapper[4631]: I1204 18:01:00.321021 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzkbd\" (UniqueName: \"kubernetes.io/projected/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-kube-api-access-fzkbd\") pod \"keystone-cron-29414521-6wj5d\" (UID: \"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958\") " pod="openstack/keystone-cron-29414521-6wj5d" Dec 04 18:01:00 crc kubenswrapper[4631]: I1204 18:01:00.321050 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-config-data\") pod \"keystone-cron-29414521-6wj5d\" (UID: \"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958\") " pod="openstack/keystone-cron-29414521-6wj5d" Dec 04 18:01:00 crc kubenswrapper[4631]: I1204 18:01:00.321328 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-combined-ca-bundle\") pod \"keystone-cron-29414521-6wj5d\" (UID: \"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958\") " pod="openstack/keystone-cron-29414521-6wj5d" Dec 04 18:01:00 crc kubenswrapper[4631]: I1204 18:01:00.423262 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-fernet-keys\") pod \"keystone-cron-29414521-6wj5d\" (UID: \"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958\") " pod="openstack/keystone-cron-29414521-6wj5d" Dec 04 18:01:00 crc kubenswrapper[4631]: I1204 18:01:00.423345 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzkbd\" (UniqueName: \"kubernetes.io/projected/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-kube-api-access-fzkbd\") pod \"keystone-cron-29414521-6wj5d\" (UID: \"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958\") " pod="openstack/keystone-cron-29414521-6wj5d" Dec 04 18:01:00 crc kubenswrapper[4631]: I1204 18:01:00.423366 4631 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-config-data\") pod \"keystone-cron-29414521-6wj5d\" (UID: \"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958\") " pod="openstack/keystone-cron-29414521-6wj5d" Dec 04 18:01:00 crc kubenswrapper[4631]: I1204 18:01:00.423442 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-combined-ca-bundle\") pod \"keystone-cron-29414521-6wj5d\" (UID: \"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958\") " pod="openstack/keystone-cron-29414521-6wj5d" Dec 04 18:01:00 crc kubenswrapper[4631]: I1204 18:01:00.438200 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-combined-ca-bundle\") pod \"keystone-cron-29414521-6wj5d\" (UID: \"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958\") " pod="openstack/keystone-cron-29414521-6wj5d" Dec 04 18:01:00 crc kubenswrapper[4631]: I1204 18:01:00.438305 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-fernet-keys\") pod \"keystone-cron-29414521-6wj5d\" (UID: \"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958\") " pod="openstack/keystone-cron-29414521-6wj5d" Dec 04 18:01:00 crc kubenswrapper[4631]: I1204 18:01:00.441184 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-config-data\") pod \"keystone-cron-29414521-6wj5d\" (UID: \"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958\") " pod="openstack/keystone-cron-29414521-6wj5d" Dec 04 18:01:00 crc kubenswrapper[4631]: I1204 18:01:00.441933 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzkbd\" (UniqueName: \"kubernetes.io/projected/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-kube-api-access-fzkbd\") pod \"keystone-cron-29414521-6wj5d\" (UID: \"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958\") " pod="openstack/keystone-cron-29414521-6wj5d" Dec 04 18:01:00 crc kubenswrapper[4631]: I1204 18:01:00.533463 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29414521-6wj5d" Dec 04 18:01:00 crc kubenswrapper[4631]: I1204 18:01:00.984349 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29414521-6wj5d"] Dec 04 18:01:01 crc kubenswrapper[4631]: I1204 18:01:01.505945 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29414521-6wj5d" event={"ID":"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958","Type":"ContainerStarted","Data":"75b6dd752bbfda51a737b138f6c5732c9c5ce31aa7080896920c60484d6cdc8a"} Dec 04 18:01:01 crc kubenswrapper[4631]: I1204 18:01:01.505990 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29414521-6wj5d" event={"ID":"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958","Type":"ContainerStarted","Data":"909afb337f1595f77c6f5b4e0535e7d3f5fdc44a21ba06a7605107f06654e6bb"} Dec 04 18:01:01 crc kubenswrapper[4631]: I1204 18:01:01.525346 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29414521-6wj5d" podStartSLOduration=1.5253290800000001 podStartE2EDuration="1.52532908s" podCreationTimestamp="2025-12-04 18:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 18:01:01.518350534 +0000 UTC m=+1991.550592542" watchObservedRunningTime="2025-12-04 18:01:01.52532908 +0000 UTC m=+1991.557571078" Dec 04 18:01:04 crc kubenswrapper[4631]: I1204 18:01:04.533074 4631 generic.go:334] "Generic (PLEG): container finished" podID="bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958" containerID="75b6dd752bbfda51a737b138f6c5732c9c5ce31aa7080896920c60484d6cdc8a" exitCode=0 Dec 04 18:01:04 crc kubenswrapper[4631]: I1204 18:01:04.533177 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29414521-6wj5d" event={"ID":"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958","Type":"ContainerDied","Data":"75b6dd752bbfda51a737b138f6c5732c9c5ce31aa7080896920c60484d6cdc8a"} Dec 04 18:01:05 crc kubenswrapper[4631]: I1204 18:01:05.826938 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29414521-6wj5d" Dec 04 18:01:05 crc kubenswrapper[4631]: I1204 18:01:05.945118 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzkbd\" (UniqueName: \"kubernetes.io/projected/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-kube-api-access-fzkbd\") pod \"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958\" (UID: \"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958\") " Dec 04 18:01:05 crc kubenswrapper[4631]: I1204 18:01:05.945245 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-fernet-keys\") pod \"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958\" (UID: \"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958\") " Dec 04 18:01:05 crc kubenswrapper[4631]: I1204 18:01:05.945307 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-config-data\") pod \"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958\" (UID: \"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958\") " Dec 04 18:01:05 crc kubenswrapper[4631]: I1204 18:01:05.946133 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-combined-ca-bundle\") pod \"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958\" (UID: \"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958\") " Dec 04 18:01:05 crc kubenswrapper[4631]: I1204 18:01:05.951539 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958" (UID: "bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:01:05 crc kubenswrapper[4631]: I1204 18:01:05.952471 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-kube-api-access-fzkbd" (OuterVolumeSpecName: "kube-api-access-fzkbd") pod "bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958" (UID: "bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958"). InnerVolumeSpecName "kube-api-access-fzkbd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:01:05 crc kubenswrapper[4631]: I1204 18:01:05.987278 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958" (UID: "bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:01:06 crc kubenswrapper[4631]: I1204 18:01:06.006681 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-config-data" (OuterVolumeSpecName: "config-data") pod "bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958" (UID: "bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:01:06 crc kubenswrapper[4631]: I1204 18:01:06.023043 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:01:06 crc kubenswrapper[4631]: I1204 18:01:06.023094 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 18:01:06 crc kubenswrapper[4631]: I1204 18:01:06.048505 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzkbd\" (UniqueName: \"kubernetes.io/projected/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-kube-api-access-fzkbd\") on node \"crc\" DevicePath \"\"" Dec 04 18:01:06 crc kubenswrapper[4631]: I1204 18:01:06.048713 4631 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 04 18:01:06 crc kubenswrapper[4631]: I1204 18:01:06.048787 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 18:01:06 crc kubenswrapper[4631]: I1204 18:01:06.048854 4631 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 18:01:06 crc kubenswrapper[4631]: I1204 18:01:06.551311 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29414521-6wj5d" event={"ID":"bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958","Type":"ContainerDied","Data":"909afb337f1595f77c6f5b4e0535e7d3f5fdc44a21ba06a7605107f06654e6bb"} Dec 04 18:01:06 crc kubenswrapper[4631]: I1204 18:01:06.551636 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="909afb337f1595f77c6f5b4e0535e7d3f5fdc44a21ba06a7605107f06654e6bb" Dec 04 18:01:06 crc kubenswrapper[4631]: I1204 18:01:06.551362 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29414521-6wj5d" Dec 04 18:01:22 crc kubenswrapper[4631]: I1204 18:01:22.288016 4631 scope.go:117] "RemoveContainer" containerID="a3ecb284e0b4f23844c2fcacf3a2193c79bcf9be9efd4e20429073bd95d4a669" Dec 04 18:01:22 crc kubenswrapper[4631]: I1204 18:01:22.317472 4631 scope.go:117] "RemoveContainer" containerID="7110f67dd1beafaa9218cabab58a590d25f39a6838e30c5fe36a3ef4426a0a50" Dec 04 18:01:22 crc kubenswrapper[4631]: I1204 18:01:22.385291 4631 scope.go:117] "RemoveContainer" containerID="e208b22924cd5eb32f1e398d016449503ea542b4d58964a3f358045a30c66cc1" Dec 04 18:01:22 crc kubenswrapper[4631]: I1204 18:01:22.415038 4631 scope.go:117] "RemoveContainer" containerID="ce6f763c15253453cfaa81b44591a8da15943491c7ec49411fff8ccb2918bdf8" Dec 04 18:01:36 crc kubenswrapper[4631]: I1204 18:01:36.023046 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:01:36 crc kubenswrapper[4631]: I1204 18:01:36.023781 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 18:01:54 crc kubenswrapper[4631]: I1204 18:01:54.035315 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-pn9l7"] Dec 04 18:01:54 crc kubenswrapper[4631]: I1204 18:01:54.044639 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-pn9l7"] Dec 04 18:01:54 crc kubenswrapper[4631]: I1204 18:01:54.248905 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6acc24ed-974b-438a-b145-cc7923b76914" path="/var/lib/kubelet/pods/6acc24ed-974b-438a-b145-cc7923b76914/volumes" Dec 04 18:01:55 crc kubenswrapper[4631]: I1204 18:01:55.037351 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-5qnqc"] Dec 04 18:01:55 crc kubenswrapper[4631]: I1204 18:01:55.048232 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-2hgnm"] Dec 04 18:01:55 crc kubenswrapper[4631]: I1204 18:01:55.061344 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-5qnqc"] Dec 04 18:01:55 crc kubenswrapper[4631]: I1204 18:01:55.071494 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-3f1a-account-create-update-bp6mt"] Dec 04 18:01:55 crc kubenswrapper[4631]: I1204 18:01:55.080222 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-ab4b-account-create-update-lrvjf"] Dec 04 18:01:55 crc kubenswrapper[4631]: I1204 18:01:55.087258 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-2hgnm"] Dec 04 18:01:55 crc kubenswrapper[4631]: I1204 18:01:55.095489 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-284d-account-create-update-q8jnv"] Dec 04 18:01:55 crc kubenswrapper[4631]: I1204 18:01:55.102399 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-ab4b-account-create-update-lrvjf"] Dec 04 18:01:55 crc kubenswrapper[4631]: I1204 18:01:55.111226 4631 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["openstack/nova-api-3f1a-account-create-update-bp6mt"] Dec 04 18:01:55 crc kubenswrapper[4631]: I1204 18:01:55.117647 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-284d-account-create-update-q8jnv"] Dec 04 18:01:56 crc kubenswrapper[4631]: I1204 18:01:56.259573 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="090be7c8-dbbd-4eb5-8621-4443f4f809d0" path="/var/lib/kubelet/pods/090be7c8-dbbd-4eb5-8621-4443f4f809d0/volumes" Dec 04 18:01:56 crc kubenswrapper[4631]: I1204 18:01:56.261271 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e9c2e3a-2169-4229-8d6c-63d4517c39fb" path="/var/lib/kubelet/pods/2e9c2e3a-2169-4229-8d6c-63d4517c39fb/volumes" Dec 04 18:01:56 crc kubenswrapper[4631]: I1204 18:01:56.262616 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43ae9593-0539-4f0d-8221-4f4bb2684ec0" path="/var/lib/kubelet/pods/43ae9593-0539-4f0d-8221-4f4bb2684ec0/volumes" Dec 04 18:01:56 crc kubenswrapper[4631]: I1204 18:01:56.263858 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3cce586-6911-47cd-84ce-4bdef87f7bec" path="/var/lib/kubelet/pods/c3cce586-6911-47cd-84ce-4bdef87f7bec/volumes" Dec 04 18:01:56 crc kubenswrapper[4631]: I1204 18:01:56.266339 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f62e7c75-f842-4abc-b88f-2a69145acea0" path="/var/lib/kubelet/pods/f62e7c75-f842-4abc-b88f-2a69145acea0/volumes" Dec 04 18:02:06 crc kubenswrapper[4631]: I1204 18:02:06.023451 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:02:06 crc kubenswrapper[4631]: I1204 18:02:06.024034 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 18:02:06 crc kubenswrapper[4631]: I1204 18:02:06.024090 4631 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 18:02:06 crc kubenswrapper[4631]: I1204 18:02:06.024934 4631 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"93cd7a21a80bbde50526477519b7b8631d88aed11055adc56d8ec73b1d4639e3"} pod="openshift-machine-config-operator/machine-config-daemon-q27wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 18:02:06 crc kubenswrapper[4631]: I1204 18:02:06.024994 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" containerID="cri-o://93cd7a21a80bbde50526477519b7b8631d88aed11055adc56d8ec73b1d4639e3" gracePeriod=600 Dec 04 18:02:07 crc kubenswrapper[4631]: I1204 18:02:07.062983 4631 generic.go:334] "Generic (PLEG): container finished" podID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerID="93cd7a21a80bbde50526477519b7b8631d88aed11055adc56d8ec73b1d4639e3" 
exitCode=0 Dec 04 18:02:07 crc kubenswrapper[4631]: I1204 18:02:07.063051 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerDied","Data":"93cd7a21a80bbde50526477519b7b8631d88aed11055adc56d8ec73b1d4639e3"} Dec 04 18:02:07 crc kubenswrapper[4631]: I1204 18:02:07.063533 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerStarted","Data":"f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6"} Dec 04 18:02:07 crc kubenswrapper[4631]: I1204 18:02:07.063554 4631 scope.go:117] "RemoveContainer" containerID="05dae2508e5958e8d873ab9c8ea102a30ffc667f1f28f23cd5e4be15f446e009" Dec 04 18:02:12 crc kubenswrapper[4631]: I1204 18:02:12.106684 4631 generic.go:334] "Generic (PLEG): container finished" podID="8a60b6a3-2e66-46ad-987f-9c6aac93e03f" containerID="e02f558c6601804df0168df41457762f802f423cb726d657c1966b7408f75b09" exitCode=0 Dec 04 18:02:12 crc kubenswrapper[4631]: I1204 18:02:12.106788 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq" event={"ID":"8a60b6a3-2e66-46ad-987f-9c6aac93e03f","Type":"ContainerDied","Data":"e02f558c6601804df0168df41457762f802f423cb726d657c1966b7408f75b09"} Dec 04 18:02:13 crc kubenswrapper[4631]: I1204 18:02:13.545856 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq" Dec 04 18:02:13 crc kubenswrapper[4631]: I1204 18:02:13.631724 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcbw5\" (UniqueName: \"kubernetes.io/projected/8a60b6a3-2e66-46ad-987f-9c6aac93e03f-kube-api-access-jcbw5\") pod \"8a60b6a3-2e66-46ad-987f-9c6aac93e03f\" (UID: \"8a60b6a3-2e66-46ad-987f-9c6aac93e03f\") " Dec 04 18:02:13 crc kubenswrapper[4631]: I1204 18:02:13.632429 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8a60b6a3-2e66-46ad-987f-9c6aac93e03f-ssh-key\") pod \"8a60b6a3-2e66-46ad-987f-9c6aac93e03f\" (UID: \"8a60b6a3-2e66-46ad-987f-9c6aac93e03f\") " Dec 04 18:02:13 crc kubenswrapper[4631]: I1204 18:02:13.632585 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a60b6a3-2e66-46ad-987f-9c6aac93e03f-inventory\") pod \"8a60b6a3-2e66-46ad-987f-9c6aac93e03f\" (UID: \"8a60b6a3-2e66-46ad-987f-9c6aac93e03f\") " Dec 04 18:02:13 crc kubenswrapper[4631]: I1204 18:02:13.637846 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a60b6a3-2e66-46ad-987f-9c6aac93e03f-kube-api-access-jcbw5" (OuterVolumeSpecName: "kube-api-access-jcbw5") pod "8a60b6a3-2e66-46ad-987f-9c6aac93e03f" (UID: "8a60b6a3-2e66-46ad-987f-9c6aac93e03f"). InnerVolumeSpecName "kube-api-access-jcbw5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:02:13 crc kubenswrapper[4631]: I1204 18:02:13.662417 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a60b6a3-2e66-46ad-987f-9c6aac93e03f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8a60b6a3-2e66-46ad-987f-9c6aac93e03f" (UID: "8a60b6a3-2e66-46ad-987f-9c6aac93e03f"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:02:13 crc kubenswrapper[4631]: I1204 18:02:13.663263 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a60b6a3-2e66-46ad-987f-9c6aac93e03f-inventory" (OuterVolumeSpecName: "inventory") pod "8a60b6a3-2e66-46ad-987f-9c6aac93e03f" (UID: "8a60b6a3-2e66-46ad-987f-9c6aac93e03f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:02:13 crc kubenswrapper[4631]: I1204 18:02:13.734961 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcbw5\" (UniqueName: \"kubernetes.io/projected/8a60b6a3-2e66-46ad-987f-9c6aac93e03f-kube-api-access-jcbw5\") on node \"crc\" DevicePath \"\"" Dec 04 18:02:13 crc kubenswrapper[4631]: I1204 18:02:13.735229 4631 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8a60b6a3-2e66-46ad-987f-9c6aac93e03f-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 18:02:13 crc kubenswrapper[4631]: I1204 18:02:13.735300 4631 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a60b6a3-2e66-46ad-987f-9c6aac93e03f-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.126437 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq" Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.126344 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq" event={"ID":"8a60b6a3-2e66-46ad-987f-9c6aac93e03f","Type":"ContainerDied","Data":"680cd4e5975ac975b4b842cb46a4a4af1dfa3cbfa8bd8809883188a9a2f52368"} Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.127463 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="680cd4e5975ac975b4b842cb46a4a4af1dfa3cbfa8bd8809883188a9a2f52368" Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.225517 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bwghg"] Dec 04 18:02:14 crc kubenswrapper[4631]: E1204 18:02:14.226016 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958" containerName="keystone-cron" Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.226041 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958" containerName="keystone-cron" Dec 04 18:02:14 crc kubenswrapper[4631]: E1204 18:02:14.226061 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a60b6a3-2e66-46ad-987f-9c6aac93e03f" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.226071 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a60b6a3-2e66-46ad-987f-9c6aac93e03f" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.226341 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a60b6a3-2e66-46ad-987f-9c6aac93e03f" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.226381 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958" containerName="keystone-cron" Dec 04 18:02:14 crc 
kubenswrapper[4631]: I1204 18:02:14.227140 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bwghg" Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.234055 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bwghg"] Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.239028 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.239210 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.239328 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-9284p" Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.239478 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.268705 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bwghg\" (UID: \"9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bwghg" Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.268824 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmmmm\" (UniqueName: \"kubernetes.io/projected/9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65-kube-api-access-wmmmm\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bwghg\" (UID: \"9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bwghg" Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.268860 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bwghg\" (UID: \"9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bwghg" Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.371025 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmmmm\" (UniqueName: \"kubernetes.io/projected/9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65-kube-api-access-wmmmm\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bwghg\" (UID: \"9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bwghg" Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.371098 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bwghg\" (UID: \"9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bwghg" Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.371236 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ssh-key\" (UniqueName: \"kubernetes.io/secret/9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bwghg\" (UID: \"9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bwghg" Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.376536 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bwghg\" (UID: \"9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bwghg" Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.380798 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bwghg\" (UID: \"9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bwghg" Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.387974 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmmmm\" (UniqueName: \"kubernetes.io/projected/9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65-kube-api-access-wmmmm\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bwghg\" (UID: \"9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bwghg" Dec 04 18:02:14 crc kubenswrapper[4631]: I1204 18:02:14.585975 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bwghg" Dec 04 18:02:15 crc kubenswrapper[4631]: I1204 18:02:15.182457 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bwghg"] Dec 04 18:02:16 crc kubenswrapper[4631]: I1204 18:02:16.144996 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bwghg" event={"ID":"9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65","Type":"ContainerStarted","Data":"4cfa1d9a2acd39e569dc9920324c8a0f7c02a922e09ff459714d77cc27fcc1f6"} Dec 04 18:02:16 crc kubenswrapper[4631]: I1204 18:02:16.145638 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bwghg" event={"ID":"9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65","Type":"ContainerStarted","Data":"ef8d066a7b845a40f8067e6a6074b441dac9ac9aebc7c04c6308b319ec21c3e5"} Dec 04 18:02:16 crc kubenswrapper[4631]: I1204 18:02:16.166591 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bwghg" podStartSLOduration=2.029839565 podStartE2EDuration="2.16657262s" podCreationTimestamp="2025-12-04 18:02:14 +0000 UTC" firstStartedPulling="2025-12-04 18:02:15.175683046 +0000 UTC m=+2065.207925044" lastFinishedPulling="2025-12-04 18:02:15.312416101 +0000 UTC m=+2065.344658099" observedRunningTime="2025-12-04 18:02:16.159850921 +0000 UTC m=+2066.192092929" watchObservedRunningTime="2025-12-04 18:02:16.16657262 +0000 UTC m=+2066.198814618" Dec 04 18:02:22 crc kubenswrapper[4631]: I1204 18:02:22.546350 4631 scope.go:117] "RemoveContainer" containerID="e59ce63ded7ad1e0e4707b1e4602b31a146be1d8012a8c66491df68b7bf32de0" Dec 04 18:02:22 
crc kubenswrapper[4631]: I1204 18:02:22.584545 4631 scope.go:117] "RemoveContainer" containerID="c72412eb2687c1a80bdd9f260fbdbccbbaf38961fee3f92777bb55a593049856" Dec 04 18:02:22 crc kubenswrapper[4631]: I1204 18:02:22.631633 4631 scope.go:117] "RemoveContainer" containerID="f12dd277fd3ef68fc55962e03d460d3797eba63fd5e80c0122fbfc7b96b9f12b" Dec 04 18:02:22 crc kubenswrapper[4631]: I1204 18:02:22.676272 4631 scope.go:117] "RemoveContainer" containerID="dd2921bedf860eba92834a72932d307a218f70e5b8d79c3f82fcb22c4b8118d8" Dec 04 18:02:22 crc kubenswrapper[4631]: I1204 18:02:22.724441 4631 scope.go:117] "RemoveContainer" containerID="2a6c55ec3d8f196fc84bc5250bc2ac8f139c8df01349af9d443229a6187ced45" Dec 04 18:02:22 crc kubenswrapper[4631]: I1204 18:02:22.762617 4631 scope.go:117] "RemoveContainer" containerID="aee9972e5505514c6ecc9f92ae4ebb5759e423f6b0df49f5f0e12e6ecdf9ee45" Dec 04 18:02:30 crc kubenswrapper[4631]: I1204 18:02:30.040246 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vkqmx"] Dec 04 18:02:30 crc kubenswrapper[4631]: I1204 18:02:30.049798 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vkqmx"] Dec 04 18:02:30 crc kubenswrapper[4631]: I1204 18:02:30.251538 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3de6752-29b4-433b-8dcc-3237237aec3b" path="/var/lib/kubelet/pods/d3de6752-29b4-433b-8dcc-3237237aec3b/volumes" Dec 04 18:03:22 crc kubenswrapper[4631]: I1204 18:03:22.903907 4631 scope.go:117] "RemoveContainer" containerID="3b71d7ebbfd54a77a51efd85d5e362b36f3a7f7200c00dc8c9e6999b98e07e0f" Dec 04 18:03:32 crc kubenswrapper[4631]: I1204 18:03:32.037671 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-6c257"] Dec 04 18:03:32 crc kubenswrapper[4631]: I1204 18:03:32.047508 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-6c257"] Dec 04 18:03:32 crc kubenswrapper[4631]: I1204 18:03:32.249771 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="924a7eb5-2c29-49e5-8a1d-60525deff185" path="/var/lib/kubelet/pods/924a7eb5-2c29-49e5-8a1d-60525deff185/volumes" Dec 04 18:03:34 crc kubenswrapper[4631]: I1204 18:03:34.034281 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-92k9c"] Dec 04 18:03:34 crc kubenswrapper[4631]: I1204 18:03:34.046555 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-92k9c"] Dec 04 18:03:34 crc kubenswrapper[4631]: I1204 18:03:34.252213 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0848a6da-7687-450b-a5c0-f64a5c0ee32e" path="/var/lib/kubelet/pods/0848a6da-7687-450b-a5c0-f64a5c0ee32e/volumes" Dec 04 18:03:34 crc kubenswrapper[4631]: I1204 18:03:34.766628 4631 generic.go:334] "Generic (PLEG): container finished" podID="9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65" containerID="4cfa1d9a2acd39e569dc9920324c8a0f7c02a922e09ff459714d77cc27fcc1f6" exitCode=0 Dec 04 18:03:34 crc kubenswrapper[4631]: I1204 18:03:34.766927 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bwghg" event={"ID":"9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65","Type":"ContainerDied","Data":"4cfa1d9a2acd39e569dc9920324c8a0f7c02a922e09ff459714d77cc27fcc1f6"} Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.180608 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bwghg" Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.258094 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmmmm\" (UniqueName: \"kubernetes.io/projected/9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65-kube-api-access-wmmmm\") pod \"9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65\" (UID: \"9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65\") " Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.258185 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65-ssh-key\") pod \"9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65\" (UID: \"9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65\") " Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.258214 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65-inventory\") pod \"9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65\" (UID: \"9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65\") " Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.263473 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65-kube-api-access-wmmmm" (OuterVolumeSpecName: "kube-api-access-wmmmm") pod "9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65" (UID: "9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65"). InnerVolumeSpecName "kube-api-access-wmmmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.288586 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65-inventory" (OuterVolumeSpecName: "inventory") pod "9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65" (UID: "9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.319464 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65" (UID: "9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.360914 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmmmm\" (UniqueName: \"kubernetes.io/projected/9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65-kube-api-access-wmmmm\") on node \"crc\" DevicePath \"\"" Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.361176 4631 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.361272 4631 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.791757 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bwghg" event={"ID":"9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65","Type":"ContainerDied","Data":"ef8d066a7b845a40f8067e6a6074b441dac9ac9aebc7c04c6308b319ec21c3e5"} Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.791797 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef8d066a7b845a40f8067e6a6074b441dac9ac9aebc7c04c6308b319ec21c3e5" Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.791826 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bwghg" Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.880010 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-28v4d"] Dec 04 18:03:36 crc kubenswrapper[4631]: E1204 18:03:36.880981 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.881009 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.881236 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.883456 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-28v4d" Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.886654 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.887002 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.887226 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-9284p" Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.887739 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.896857 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-28v4d"] Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.971549 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02fda8da-e708-4897-9997-9c71901e45b7-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-28v4d\" (UID: \"02fda8da-e708-4897-9997-9c71901e45b7\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-28v4d" Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.971635 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02fda8da-e708-4897-9997-9c71901e45b7-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-28v4d\" (UID: \"02fda8da-e708-4897-9997-9c71901e45b7\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-28v4d" Dec 04 18:03:36 crc kubenswrapper[4631]: I1204 18:03:36.971697 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqfss\" (UniqueName: \"kubernetes.io/projected/02fda8da-e708-4897-9997-9c71901e45b7-kube-api-access-vqfss\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-28v4d\" (UID: \"02fda8da-e708-4897-9997-9c71901e45b7\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-28v4d" Dec 04 18:03:37 crc kubenswrapper[4631]: I1204 18:03:37.073210 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02fda8da-e708-4897-9997-9c71901e45b7-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-28v4d\" (UID: \"02fda8da-e708-4897-9997-9c71901e45b7\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-28v4d" Dec 04 18:03:37 crc kubenswrapper[4631]: I1204 18:03:37.073270 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02fda8da-e708-4897-9997-9c71901e45b7-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-28v4d\" (UID: \"02fda8da-e708-4897-9997-9c71901e45b7\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-28v4d" Dec 04 18:03:37 crc kubenswrapper[4631]: I1204 18:03:37.073302 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqfss\" (UniqueName: \"kubernetes.io/projected/02fda8da-e708-4897-9997-9c71901e45b7-kube-api-access-vqfss\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-28v4d\" (UID: \"02fda8da-e708-4897-9997-9c71901e45b7\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-28v4d" Dec 04 18:03:37 crc kubenswrapper[4631]: I1204 18:03:37.078168 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02fda8da-e708-4897-9997-9c71901e45b7-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-28v4d\" (UID: \"02fda8da-e708-4897-9997-9c71901e45b7\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-28v4d" Dec 04 18:03:37 crc kubenswrapper[4631]: I1204 18:03:37.078678 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02fda8da-e708-4897-9997-9c71901e45b7-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-28v4d\" (UID: \"02fda8da-e708-4897-9997-9c71901e45b7\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-28v4d" Dec 04 18:03:37 crc kubenswrapper[4631]: I1204 18:03:37.090185 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqfss\" (UniqueName: \"kubernetes.io/projected/02fda8da-e708-4897-9997-9c71901e45b7-kube-api-access-vqfss\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-28v4d\" (UID: \"02fda8da-e708-4897-9997-9c71901e45b7\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-28v4d" Dec 04 18:03:37 crc kubenswrapper[4631]: I1204 18:03:37.201720 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-28v4d" Dec 04 18:03:37 crc kubenswrapper[4631]: I1204 18:03:37.735188 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-28v4d"] Dec 04 18:03:37 crc kubenswrapper[4631]: I1204 18:03:37.800434 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-28v4d" event={"ID":"02fda8da-e708-4897-9997-9c71901e45b7","Type":"ContainerStarted","Data":"a4b8e75841210695cc93f7f0a63e2f8edadf41a138e77ea3c7ef802fa123a54d"} Dec 04 18:03:38 crc kubenswrapper[4631]: I1204 18:03:38.810398 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-28v4d" event={"ID":"02fda8da-e708-4897-9997-9c71901e45b7","Type":"ContainerStarted","Data":"14df8144eb3d47711c1c2887bdbea8f76d9d84b72a75eb075d6ee415583b785a"} Dec 04 18:03:38 crc kubenswrapper[4631]: I1204 18:03:38.834047 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-28v4d" podStartSLOduration=2.678110951 podStartE2EDuration="2.834032318s" podCreationTimestamp="2025-12-04 18:03:36 +0000 UTC" firstStartedPulling="2025-12-04 18:03:37.752945273 +0000 UTC m=+2147.785187271" lastFinishedPulling="2025-12-04 18:03:37.90886664 +0000 UTC m=+2147.941108638" observedRunningTime="2025-12-04 18:03:38.827708859 +0000 UTC m=+2148.859950877" watchObservedRunningTime="2025-12-04 18:03:38.834032318 +0000 UTC m=+2148.866274316" Dec 04 18:03:43 crc kubenswrapper[4631]: I1204 18:03:43.862948 4631 generic.go:334] "Generic (PLEG): container finished" podID="02fda8da-e708-4897-9997-9c71901e45b7" containerID="14df8144eb3d47711c1c2887bdbea8f76d9d84b72a75eb075d6ee415583b785a" exitCode=0 Dec 04 18:03:43 crc kubenswrapper[4631]: I1204 
18:03:43.863036 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-28v4d" event={"ID":"02fda8da-e708-4897-9997-9c71901e45b7","Type":"ContainerDied","Data":"14df8144eb3d47711c1c2887bdbea8f76d9d84b72a75eb075d6ee415583b785a"} Dec 04 18:03:45 crc kubenswrapper[4631]: I1204 18:03:45.280893 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-28v4d" Dec 04 18:03:45 crc kubenswrapper[4631]: I1204 18:03:45.418268 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02fda8da-e708-4897-9997-9c71901e45b7-inventory\") pod \"02fda8da-e708-4897-9997-9c71901e45b7\" (UID: \"02fda8da-e708-4897-9997-9c71901e45b7\") " Dec 04 18:03:45 crc kubenswrapper[4631]: I1204 18:03:45.418470 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02fda8da-e708-4897-9997-9c71901e45b7-ssh-key\") pod \"02fda8da-e708-4897-9997-9c71901e45b7\" (UID: \"02fda8da-e708-4897-9997-9c71901e45b7\") " Dec 04 18:03:45 crc kubenswrapper[4631]: I1204 18:03:45.418588 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqfss\" (UniqueName: \"kubernetes.io/projected/02fda8da-e708-4897-9997-9c71901e45b7-kube-api-access-vqfss\") pod \"02fda8da-e708-4897-9997-9c71901e45b7\" (UID: \"02fda8da-e708-4897-9997-9c71901e45b7\") " Dec 04 18:03:45 crc kubenswrapper[4631]: I1204 18:03:45.425269 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02fda8da-e708-4897-9997-9c71901e45b7-kube-api-access-vqfss" (OuterVolumeSpecName: "kube-api-access-vqfss") pod "02fda8da-e708-4897-9997-9c71901e45b7" (UID: "02fda8da-e708-4897-9997-9c71901e45b7"). InnerVolumeSpecName "kube-api-access-vqfss". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:03:45 crc kubenswrapper[4631]: I1204 18:03:45.452389 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02fda8da-e708-4897-9997-9c71901e45b7-inventory" (OuterVolumeSpecName: "inventory") pod "02fda8da-e708-4897-9997-9c71901e45b7" (UID: "02fda8da-e708-4897-9997-9c71901e45b7"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:03:45 crc kubenswrapper[4631]: I1204 18:03:45.458136 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02fda8da-e708-4897-9997-9c71901e45b7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "02fda8da-e708-4897-9997-9c71901e45b7" (UID: "02fda8da-e708-4897-9997-9c71901e45b7"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:03:45 crc kubenswrapper[4631]: I1204 18:03:45.520467 4631 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02fda8da-e708-4897-9997-9c71901e45b7-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 18:03:45 crc kubenswrapper[4631]: I1204 18:03:45.520614 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqfss\" (UniqueName: \"kubernetes.io/projected/02fda8da-e708-4897-9997-9c71901e45b7-kube-api-access-vqfss\") on node \"crc\" DevicePath \"\"" Dec 04 18:03:45 crc kubenswrapper[4631]: I1204 18:03:45.520682 4631 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02fda8da-e708-4897-9997-9c71901e45b7-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 18:03:45 crc kubenswrapper[4631]: I1204 18:03:45.881416 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-28v4d" Dec 04 18:03:45 crc kubenswrapper[4631]: I1204 18:03:45.881345 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-28v4d" event={"ID":"02fda8da-e708-4897-9997-9c71901e45b7","Type":"ContainerDied","Data":"a4b8e75841210695cc93f7f0a63e2f8edadf41a138e77ea3c7ef802fa123a54d"} Dec 04 18:03:45 crc kubenswrapper[4631]: I1204 18:03:45.881807 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4b8e75841210695cc93f7f0a63e2f8edadf41a138e77ea3c7ef802fa123a54d" Dec 04 18:03:46 crc kubenswrapper[4631]: I1204 18:03:46.041930 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-5zdzl"] Dec 04 18:03:46 crc kubenswrapper[4631]: E1204 18:03:46.042772 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02fda8da-e708-4897-9997-9c71901e45b7" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 04 18:03:46 crc kubenswrapper[4631]: I1204 18:03:46.042792 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="02fda8da-e708-4897-9997-9c71901e45b7" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 04 18:03:46 crc kubenswrapper[4631]: I1204 18:03:46.043035 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="02fda8da-e708-4897-9997-9c71901e45b7" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 04 18:03:46 crc kubenswrapper[4631]: I1204 18:03:46.043767 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5zdzl" Dec 04 18:03:46 crc kubenswrapper[4631]: I1204 18:03:46.045347 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-9284p" Dec 04 18:03:46 crc kubenswrapper[4631]: I1204 18:03:46.045703 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 18:03:46 crc kubenswrapper[4631]: I1204 18:03:46.045969 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 18:03:46 crc kubenswrapper[4631]: I1204 18:03:46.046036 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 18:03:46 crc kubenswrapper[4631]: I1204 18:03:46.058206 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-5zdzl"] Dec 04 18:03:46 crc kubenswrapper[4631]: I1204 18:03:46.131934 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5zdzl\" (UID: \"191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5zdzl" Dec 04 18:03:46 crc kubenswrapper[4631]: I1204 18:03:46.132002 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5zdzl\" (UID: \"191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5zdzl" Dec 04 18:03:46 crc kubenswrapper[4631]: I1204 18:03:46.132122 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hn4sb\" (UniqueName: \"kubernetes.io/projected/191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef-kube-api-access-hn4sb\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5zdzl\" (UID: \"191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5zdzl" Dec 04 18:03:46 crc kubenswrapper[4631]: I1204 18:03:46.233789 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hn4sb\" (UniqueName: \"kubernetes.io/projected/191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef-kube-api-access-hn4sb\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5zdzl\" (UID: \"191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5zdzl" Dec 04 18:03:46 crc kubenswrapper[4631]: I1204 18:03:46.234065 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5zdzl\" (UID: \"191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5zdzl" Dec 04 18:03:46 crc kubenswrapper[4631]: I1204 18:03:46.234179 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5zdzl\" (UID: 
\"191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5zdzl" Dec 04 18:03:46 crc kubenswrapper[4631]: I1204 18:03:46.239014 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5zdzl\" (UID: \"191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5zdzl" Dec 04 18:03:46 crc kubenswrapper[4631]: I1204 18:03:46.241997 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5zdzl\" (UID: \"191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5zdzl" Dec 04 18:03:46 crc kubenswrapper[4631]: I1204 18:03:46.259147 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hn4sb\" (UniqueName: \"kubernetes.io/projected/191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef-kube-api-access-hn4sb\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5zdzl\" (UID: \"191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5zdzl" Dec 04 18:03:46 crc kubenswrapper[4631]: I1204 18:03:46.365666 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5zdzl" Dec 04 18:03:46 crc kubenswrapper[4631]: W1204 18:03:46.893791 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod191d10cc_aa2a_48dd_bbe5_ee2a4f63fcef.slice/crio-b6045e6e791e0e6f07af18645f3c73cd628eec43824e0e1358e77e3a90e9d30f WatchSource:0}: Error finding container b6045e6e791e0e6f07af18645f3c73cd628eec43824e0e1358e77e3a90e9d30f: Status 404 returned error can't find the container with id b6045e6e791e0e6f07af18645f3c73cd628eec43824e0e1358e77e3a90e9d30f Dec 04 18:03:46 crc kubenswrapper[4631]: I1204 18:03:46.898230 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-5zdzl"] Dec 04 18:03:47 crc kubenswrapper[4631]: I1204 18:03:47.897207 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5zdzl" event={"ID":"191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef","Type":"ContainerStarted","Data":"5ffa5d7962b9adf06fedb06ffdcc48b04abb3b785cefa5eb2a01a1240b65e821"} Dec 04 18:03:47 crc kubenswrapper[4631]: I1204 18:03:47.897786 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5zdzl" event={"ID":"191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef","Type":"ContainerStarted","Data":"b6045e6e791e0e6f07af18645f3c73cd628eec43824e0e1358e77e3a90e9d30f"} Dec 04 18:03:47 crc kubenswrapper[4631]: I1204 18:03:47.919624 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5zdzl" podStartSLOduration=1.7489574509999999 podStartE2EDuration="1.919606157s" podCreationTimestamp="2025-12-04 18:03:46 +0000 UTC" firstStartedPulling="2025-12-04 18:03:46.900553511 +0000 UTC m=+2156.932795519" lastFinishedPulling="2025-12-04 18:03:47.071202237 +0000 UTC m=+2157.103444225" observedRunningTime="2025-12-04 18:03:47.912817974 +0000 UTC 
m=+2157.945059972" watchObservedRunningTime="2025-12-04 18:03:47.919606157 +0000 UTC m=+2157.951848155" Dec 04 18:04:06 crc kubenswrapper[4631]: I1204 18:04:06.022553 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:04:06 crc kubenswrapper[4631]: I1204 18:04:06.022993 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 18:04:16 crc kubenswrapper[4631]: I1204 18:04:16.065959 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-slhqd"] Dec 04 18:04:16 crc kubenswrapper[4631]: I1204 18:04:16.084000 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-slhqd"] Dec 04 18:04:16 crc kubenswrapper[4631]: I1204 18:04:16.255959 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10901c62-65f7-43ed-ab1e-93c30cb5b5f9" path="/var/lib/kubelet/pods/10901c62-65f7-43ed-ab1e-93c30cb5b5f9/volumes" Dec 04 18:04:22 crc kubenswrapper[4631]: I1204 18:04:22.990176 4631 scope.go:117] "RemoveContainer" containerID="987727ba72f5119a2838995bf36f03f672f805ba31748431130dba383bbb0ef1" Dec 04 18:04:23 crc kubenswrapper[4631]: I1204 18:04:23.024775 4631 scope.go:117] "RemoveContainer" containerID="1effa5b1412a3fbf930a9c959f059909d6c4e0d94184100b87c360a6dc8e27f7" Dec 04 18:04:23 crc kubenswrapper[4631]: I1204 18:04:23.075281 4631 scope.go:117] "RemoveContainer" containerID="53bf6f1989ff93e21110faf35f9bc52c6d6555fb2a1fe47c841ac363ba02d3a4" Dec 04 18:04:33 crc kubenswrapper[4631]: I1204 18:04:33.279431 4631 generic.go:334] "Generic (PLEG): container finished" podID="191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef" containerID="5ffa5d7962b9adf06fedb06ffdcc48b04abb3b785cefa5eb2a01a1240b65e821" exitCode=0 Dec 04 18:04:33 crc kubenswrapper[4631]: I1204 18:04:33.279567 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5zdzl" event={"ID":"191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef","Type":"ContainerDied","Data":"5ffa5d7962b9adf06fedb06ffdcc48b04abb3b785cefa5eb2a01a1240b65e821"} Dec 04 18:04:34 crc kubenswrapper[4631]: I1204 18:04:34.747991 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5zdzl" Dec 04 18:04:34 crc kubenswrapper[4631]: I1204 18:04:34.771163 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef-ssh-key\") pod \"191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef\" (UID: \"191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef\") " Dec 04 18:04:34 crc kubenswrapper[4631]: I1204 18:04:34.771277 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hn4sb\" (UniqueName: \"kubernetes.io/projected/191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef-kube-api-access-hn4sb\") pod \"191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef\" (UID: \"191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef\") " Dec 04 18:04:34 crc kubenswrapper[4631]: I1204 18:04:34.771341 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef-inventory\") pod \"191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef\" (UID: \"191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef\") " Dec 04 18:04:34 crc kubenswrapper[4631]: I1204 18:04:34.791211 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef-kube-api-access-hn4sb" (OuterVolumeSpecName: "kube-api-access-hn4sb") pod "191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef" (UID: "191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef"). InnerVolumeSpecName "kube-api-access-hn4sb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:04:34 crc kubenswrapper[4631]: I1204 18:04:34.803536 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef-inventory" (OuterVolumeSpecName: "inventory") pod "191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef" (UID: "191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:04:34 crc kubenswrapper[4631]: I1204 18:04:34.820861 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef" (UID: "191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:04:34 crc kubenswrapper[4631]: I1204 18:04:34.873803 4631 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 18:04:34 crc kubenswrapper[4631]: I1204 18:04:34.873834 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hn4sb\" (UniqueName: \"kubernetes.io/projected/191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef-kube-api-access-hn4sb\") on node \"crc\" DevicePath \"\"" Dec 04 18:04:34 crc kubenswrapper[4631]: I1204 18:04:34.873846 4631 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 18:04:35 crc kubenswrapper[4631]: I1204 18:04:35.299243 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5zdzl" event={"ID":"191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef","Type":"ContainerDied","Data":"b6045e6e791e0e6f07af18645f3c73cd628eec43824e0e1358e77e3a90e9d30f"} Dec 04 18:04:35 crc kubenswrapper[4631]: I1204 18:04:35.299601 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b6045e6e791e0e6f07af18645f3c73cd628eec43824e0e1358e77e3a90e9d30f" Dec 04 18:04:35 crc kubenswrapper[4631]: I1204 18:04:35.299654 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5zdzl" Dec 04 18:04:35 crc kubenswrapper[4631]: I1204 18:04:35.400505 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s"] Dec 04 18:04:35 crc kubenswrapper[4631]: E1204 18:04:35.401086 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 04 18:04:35 crc kubenswrapper[4631]: I1204 18:04:35.401112 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 04 18:04:35 crc kubenswrapper[4631]: I1204 18:04:35.401391 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 04 18:04:35 crc kubenswrapper[4631]: I1204 18:04:35.402207 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s" Dec 04 18:04:35 crc kubenswrapper[4631]: I1204 18:04:35.407699 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 18:04:35 crc kubenswrapper[4631]: I1204 18:04:35.407779 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 18:04:35 crc kubenswrapper[4631]: I1204 18:04:35.407937 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 18:04:35 crc kubenswrapper[4631]: I1204 18:04:35.408220 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-9284p" Dec 04 18:04:35 crc kubenswrapper[4631]: I1204 18:04:35.418558 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s"] Dec 04 18:04:35 crc kubenswrapper[4631]: I1204 18:04:35.486044 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c303f9ff-2337-47a3-8e07-4ace557cc99a-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s\" (UID: \"c303f9ff-2337-47a3-8e07-4ace557cc99a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s" Dec 04 18:04:35 crc kubenswrapper[4631]: I1204 18:04:35.486299 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjbx4\" (UniqueName: \"kubernetes.io/projected/c303f9ff-2337-47a3-8e07-4ace557cc99a-kube-api-access-kjbx4\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s\" (UID: \"c303f9ff-2337-47a3-8e07-4ace557cc99a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s" Dec 04 18:04:35 crc kubenswrapper[4631]: I1204 18:04:35.486467 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c303f9ff-2337-47a3-8e07-4ace557cc99a-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s\" (UID: \"c303f9ff-2337-47a3-8e07-4ace557cc99a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s" Dec 04 18:04:35 crc kubenswrapper[4631]: I1204 18:04:35.588623 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c303f9ff-2337-47a3-8e07-4ace557cc99a-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s\" (UID: \"c303f9ff-2337-47a3-8e07-4ace557cc99a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s" Dec 04 18:04:35 crc kubenswrapper[4631]: I1204 18:04:35.588712 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjbx4\" (UniqueName: \"kubernetes.io/projected/c303f9ff-2337-47a3-8e07-4ace557cc99a-kube-api-access-kjbx4\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s\" (UID: \"c303f9ff-2337-47a3-8e07-4ace557cc99a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s" Dec 04 18:04:35 crc kubenswrapper[4631]: I1204 18:04:35.588775 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c303f9ff-2337-47a3-8e07-4ace557cc99a-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s\" 
(UID: \"c303f9ff-2337-47a3-8e07-4ace557cc99a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s" Dec 04 18:04:35 crc kubenswrapper[4631]: I1204 18:04:35.593358 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c303f9ff-2337-47a3-8e07-4ace557cc99a-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s\" (UID: \"c303f9ff-2337-47a3-8e07-4ace557cc99a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s" Dec 04 18:04:35 crc kubenswrapper[4631]: I1204 18:04:35.595278 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c303f9ff-2337-47a3-8e07-4ace557cc99a-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s\" (UID: \"c303f9ff-2337-47a3-8e07-4ace557cc99a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s" Dec 04 18:04:35 crc kubenswrapper[4631]: I1204 18:04:35.606249 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjbx4\" (UniqueName: \"kubernetes.io/projected/c303f9ff-2337-47a3-8e07-4ace557cc99a-kube-api-access-kjbx4\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s\" (UID: \"c303f9ff-2337-47a3-8e07-4ace557cc99a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s" Dec 04 18:04:35 crc kubenswrapper[4631]: I1204 18:04:35.720516 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s" Dec 04 18:04:36 crc kubenswrapper[4631]: I1204 18:04:36.023055 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:04:36 crc kubenswrapper[4631]: I1204 18:04:36.023480 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 18:04:36 crc kubenswrapper[4631]: I1204 18:04:36.366871 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s"] Dec 04 18:04:37 crc kubenswrapper[4631]: I1204 18:04:37.329449 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s" event={"ID":"c303f9ff-2337-47a3-8e07-4ace557cc99a","Type":"ContainerStarted","Data":"1ea67aa44cfa5b313d8d0ac7899352f4ae7d44b75c14189cec9b144782a9517b"} Dec 04 18:04:37 crc kubenswrapper[4631]: I1204 18:04:37.330013 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s" event={"ID":"c303f9ff-2337-47a3-8e07-4ace557cc99a","Type":"ContainerStarted","Data":"e03ce6d9b709606b71b41c6905bc42c8f34dbf0771814f3a0a677c082fd40eab"} Dec 04 18:04:37 crc kubenswrapper[4631]: I1204 18:04:37.355781 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s" podStartSLOduration=2.19955422 podStartE2EDuration="2.355757205s" podCreationTimestamp="2025-12-04 18:04:35 +0000 
UTC" firstStartedPulling="2025-12-04 18:04:36.377462247 +0000 UTC m=+2206.409704245" lastFinishedPulling="2025-12-04 18:04:36.533665232 +0000 UTC m=+2206.565907230" observedRunningTime="2025-12-04 18:04:37.34785133 +0000 UTC m=+2207.380093378" watchObservedRunningTime="2025-12-04 18:04:37.355757205 +0000 UTC m=+2207.387999243" Dec 04 18:04:39 crc kubenswrapper[4631]: I1204 18:04:39.737447 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bfv79"] Dec 04 18:04:39 crc kubenswrapper[4631]: I1204 18:04:39.739608 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bfv79" Dec 04 18:04:39 crc kubenswrapper[4631]: I1204 18:04:39.764168 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-db26j\" (UniqueName: \"kubernetes.io/projected/84cc41dd-a1c9-44fa-b297-4cf783316e2a-kube-api-access-db26j\") pod \"redhat-marketplace-bfv79\" (UID: \"84cc41dd-a1c9-44fa-b297-4cf783316e2a\") " pod="openshift-marketplace/redhat-marketplace-bfv79" Dec 04 18:04:39 crc kubenswrapper[4631]: I1204 18:04:39.764273 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84cc41dd-a1c9-44fa-b297-4cf783316e2a-utilities\") pod \"redhat-marketplace-bfv79\" (UID: \"84cc41dd-a1c9-44fa-b297-4cf783316e2a\") " pod="openshift-marketplace/redhat-marketplace-bfv79" Dec 04 18:04:39 crc kubenswrapper[4631]: I1204 18:04:39.764351 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84cc41dd-a1c9-44fa-b297-4cf783316e2a-catalog-content\") pod \"redhat-marketplace-bfv79\" (UID: \"84cc41dd-a1c9-44fa-b297-4cf783316e2a\") " pod="openshift-marketplace/redhat-marketplace-bfv79" Dec 04 18:04:39 crc kubenswrapper[4631]: I1204 18:04:39.769616 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bfv79"] Dec 04 18:04:39 crc kubenswrapper[4631]: I1204 18:04:39.865714 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-db26j\" (UniqueName: \"kubernetes.io/projected/84cc41dd-a1c9-44fa-b297-4cf783316e2a-kube-api-access-db26j\") pod \"redhat-marketplace-bfv79\" (UID: \"84cc41dd-a1c9-44fa-b297-4cf783316e2a\") " pod="openshift-marketplace/redhat-marketplace-bfv79" Dec 04 18:04:39 crc kubenswrapper[4631]: I1204 18:04:39.865799 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84cc41dd-a1c9-44fa-b297-4cf783316e2a-utilities\") pod \"redhat-marketplace-bfv79\" (UID: \"84cc41dd-a1c9-44fa-b297-4cf783316e2a\") " pod="openshift-marketplace/redhat-marketplace-bfv79" Dec 04 18:04:39 crc kubenswrapper[4631]: I1204 18:04:39.865853 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84cc41dd-a1c9-44fa-b297-4cf783316e2a-catalog-content\") pod \"redhat-marketplace-bfv79\" (UID: \"84cc41dd-a1c9-44fa-b297-4cf783316e2a\") " pod="openshift-marketplace/redhat-marketplace-bfv79" Dec 04 18:04:39 crc kubenswrapper[4631]: I1204 18:04:39.866501 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84cc41dd-a1c9-44fa-b297-4cf783316e2a-catalog-content\") pod 
\"redhat-marketplace-bfv79\" (UID: \"84cc41dd-a1c9-44fa-b297-4cf783316e2a\") " pod="openshift-marketplace/redhat-marketplace-bfv79" Dec 04 18:04:39 crc kubenswrapper[4631]: I1204 18:04:39.866499 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84cc41dd-a1c9-44fa-b297-4cf783316e2a-utilities\") pod \"redhat-marketplace-bfv79\" (UID: \"84cc41dd-a1c9-44fa-b297-4cf783316e2a\") " pod="openshift-marketplace/redhat-marketplace-bfv79" Dec 04 18:04:39 crc kubenswrapper[4631]: I1204 18:04:39.888344 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-db26j\" (UniqueName: \"kubernetes.io/projected/84cc41dd-a1c9-44fa-b297-4cf783316e2a-kube-api-access-db26j\") pod \"redhat-marketplace-bfv79\" (UID: \"84cc41dd-a1c9-44fa-b297-4cf783316e2a\") " pod="openshift-marketplace/redhat-marketplace-bfv79" Dec 04 18:04:40 crc kubenswrapper[4631]: I1204 18:04:40.076277 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bfv79" Dec 04 18:04:40 crc kubenswrapper[4631]: W1204 18:04:40.565485 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod84cc41dd_a1c9_44fa_b297_4cf783316e2a.slice/crio-4ce531a81fa8e302f61138039a3607cf14296c55f47dd27b18b959c6d5a804e3 WatchSource:0}: Error finding container 4ce531a81fa8e302f61138039a3607cf14296c55f47dd27b18b959c6d5a804e3: Status 404 returned error can't find the container with id 4ce531a81fa8e302f61138039a3607cf14296c55f47dd27b18b959c6d5a804e3 Dec 04 18:04:40 crc kubenswrapper[4631]: I1204 18:04:40.577163 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bfv79"] Dec 04 18:04:41 crc kubenswrapper[4631]: I1204 18:04:41.373947 4631 generic.go:334] "Generic (PLEG): container finished" podID="84cc41dd-a1c9-44fa-b297-4cf783316e2a" containerID="bc6ab5b229bf64f61539dc79aa679dda877fd845bc8fa6a66c5fda83bb1da509" exitCode=0 Dec 04 18:04:41 crc kubenswrapper[4631]: I1204 18:04:41.374067 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bfv79" event={"ID":"84cc41dd-a1c9-44fa-b297-4cf783316e2a","Type":"ContainerDied","Data":"bc6ab5b229bf64f61539dc79aa679dda877fd845bc8fa6a66c5fda83bb1da509"} Dec 04 18:04:41 crc kubenswrapper[4631]: I1204 18:04:41.374320 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bfv79" event={"ID":"84cc41dd-a1c9-44fa-b297-4cf783316e2a","Type":"ContainerStarted","Data":"4ce531a81fa8e302f61138039a3607cf14296c55f47dd27b18b959c6d5a804e3"} Dec 04 18:04:42 crc kubenswrapper[4631]: I1204 18:04:42.387073 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bfv79" event={"ID":"84cc41dd-a1c9-44fa-b297-4cf783316e2a","Type":"ContainerStarted","Data":"e0219c809b99125f650034213c0c83268db88c5166ed97dd54642b9f413cc9de"} Dec 04 18:04:43 crc kubenswrapper[4631]: I1204 18:04:43.417432 4631 generic.go:334] "Generic (PLEG): container finished" podID="84cc41dd-a1c9-44fa-b297-4cf783316e2a" containerID="e0219c809b99125f650034213c0c83268db88c5166ed97dd54642b9f413cc9de" exitCode=0 Dec 04 18:04:43 crc kubenswrapper[4631]: I1204 18:04:43.417475 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bfv79" 
event={"ID":"84cc41dd-a1c9-44fa-b297-4cf783316e2a","Type":"ContainerDied","Data":"e0219c809b99125f650034213c0c83268db88c5166ed97dd54642b9f413cc9de"} Dec 04 18:04:44 crc kubenswrapper[4631]: I1204 18:04:44.427638 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bfv79" event={"ID":"84cc41dd-a1c9-44fa-b297-4cf783316e2a","Type":"ContainerStarted","Data":"3aae9e163526d514bafbd952404e16c59c6ac2d4900985a74f31d106fde11ec0"} Dec 04 18:04:44 crc kubenswrapper[4631]: I1204 18:04:44.455976 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bfv79" podStartSLOduration=3.041163413 podStartE2EDuration="5.455955399s" podCreationTimestamp="2025-12-04 18:04:39 +0000 UTC" firstStartedPulling="2025-12-04 18:04:41.376634724 +0000 UTC m=+2211.408876712" lastFinishedPulling="2025-12-04 18:04:43.7914267 +0000 UTC m=+2213.823668698" observedRunningTime="2025-12-04 18:04:44.449967279 +0000 UTC m=+2214.482209277" watchObservedRunningTime="2025-12-04 18:04:44.455955399 +0000 UTC m=+2214.488197397" Dec 04 18:04:50 crc kubenswrapper[4631]: I1204 18:04:50.077298 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bfv79" Dec 04 18:04:50 crc kubenswrapper[4631]: I1204 18:04:50.078057 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bfv79" Dec 04 18:04:50 crc kubenswrapper[4631]: I1204 18:04:50.134597 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bfv79" Dec 04 18:04:50 crc kubenswrapper[4631]: I1204 18:04:50.521684 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bfv79" Dec 04 18:04:50 crc kubenswrapper[4631]: I1204 18:04:50.568730 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bfv79"] Dec 04 18:04:52 crc kubenswrapper[4631]: I1204 18:04:52.495068 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bfv79" podUID="84cc41dd-a1c9-44fa-b297-4cf783316e2a" containerName="registry-server" containerID="cri-o://3aae9e163526d514bafbd952404e16c59c6ac2d4900985a74f31d106fde11ec0" gracePeriod=2 Dec 04 18:04:52 crc kubenswrapper[4631]: I1204 18:04:52.970508 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bfv79" Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.029357 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-db26j\" (UniqueName: \"kubernetes.io/projected/84cc41dd-a1c9-44fa-b297-4cf783316e2a-kube-api-access-db26j\") pod \"84cc41dd-a1c9-44fa-b297-4cf783316e2a\" (UID: \"84cc41dd-a1c9-44fa-b297-4cf783316e2a\") " Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.029418 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84cc41dd-a1c9-44fa-b297-4cf783316e2a-catalog-content\") pod \"84cc41dd-a1c9-44fa-b297-4cf783316e2a\" (UID: \"84cc41dd-a1c9-44fa-b297-4cf783316e2a\") " Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.035524 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84cc41dd-a1c9-44fa-b297-4cf783316e2a-kube-api-access-db26j" (OuterVolumeSpecName: "kube-api-access-db26j") pod "84cc41dd-a1c9-44fa-b297-4cf783316e2a" (UID: "84cc41dd-a1c9-44fa-b297-4cf783316e2a"). InnerVolumeSpecName "kube-api-access-db26j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.059203 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84cc41dd-a1c9-44fa-b297-4cf783316e2a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "84cc41dd-a1c9-44fa-b297-4cf783316e2a" (UID: "84cc41dd-a1c9-44fa-b297-4cf783316e2a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.130982 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84cc41dd-a1c9-44fa-b297-4cf783316e2a-utilities\") pod \"84cc41dd-a1c9-44fa-b297-4cf783316e2a\" (UID: \"84cc41dd-a1c9-44fa-b297-4cf783316e2a\") " Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.132061 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-db26j\" (UniqueName: \"kubernetes.io/projected/84cc41dd-a1c9-44fa-b297-4cf783316e2a-kube-api-access-db26j\") on node \"crc\" DevicePath \"\"" Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.132058 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84cc41dd-a1c9-44fa-b297-4cf783316e2a-utilities" (OuterVolumeSpecName: "utilities") pod "84cc41dd-a1c9-44fa-b297-4cf783316e2a" (UID: "84cc41dd-a1c9-44fa-b297-4cf783316e2a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.132090 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84cc41dd-a1c9-44fa-b297-4cf783316e2a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.234411 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84cc41dd-a1c9-44fa-b297-4cf783316e2a-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.506445 4631 generic.go:334] "Generic (PLEG): container finished" podID="84cc41dd-a1c9-44fa-b297-4cf783316e2a" containerID="3aae9e163526d514bafbd952404e16c59c6ac2d4900985a74f31d106fde11ec0" exitCode=0 Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.506504 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bfv79" event={"ID":"84cc41dd-a1c9-44fa-b297-4cf783316e2a","Type":"ContainerDied","Data":"3aae9e163526d514bafbd952404e16c59c6ac2d4900985a74f31d106fde11ec0"} Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.506552 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bfv79" event={"ID":"84cc41dd-a1c9-44fa-b297-4cf783316e2a","Type":"ContainerDied","Data":"4ce531a81fa8e302f61138039a3607cf14296c55f47dd27b18b959c6d5a804e3"} Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.506530 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bfv79" Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.506573 4631 scope.go:117] "RemoveContainer" containerID="3aae9e163526d514bafbd952404e16c59c6ac2d4900985a74f31d106fde11ec0" Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.538811 4631 scope.go:117] "RemoveContainer" containerID="e0219c809b99125f650034213c0c83268db88c5166ed97dd54642b9f413cc9de" Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.563099 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bfv79"] Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.588550 4631 scope.go:117] "RemoveContainer" containerID="bc6ab5b229bf64f61539dc79aa679dda877fd845bc8fa6a66c5fda83bb1da509" Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.604900 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bfv79"] Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.623850 4631 scope.go:117] "RemoveContainer" containerID="3aae9e163526d514bafbd952404e16c59c6ac2d4900985a74f31d106fde11ec0" Dec 04 18:04:53 crc kubenswrapper[4631]: E1204 18:04:53.627809 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3aae9e163526d514bafbd952404e16c59c6ac2d4900985a74f31d106fde11ec0\": container with ID starting with 3aae9e163526d514bafbd952404e16c59c6ac2d4900985a74f31d106fde11ec0 not found: ID does not exist" containerID="3aae9e163526d514bafbd952404e16c59c6ac2d4900985a74f31d106fde11ec0" Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.627859 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3aae9e163526d514bafbd952404e16c59c6ac2d4900985a74f31d106fde11ec0"} err="failed to get container status \"3aae9e163526d514bafbd952404e16c59c6ac2d4900985a74f31d106fde11ec0\": rpc error: 
code = NotFound desc = could not find container \"3aae9e163526d514bafbd952404e16c59c6ac2d4900985a74f31d106fde11ec0\": container with ID starting with 3aae9e163526d514bafbd952404e16c59c6ac2d4900985a74f31d106fde11ec0 not found: ID does not exist" Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.627882 4631 scope.go:117] "RemoveContainer" containerID="e0219c809b99125f650034213c0c83268db88c5166ed97dd54642b9f413cc9de" Dec 04 18:04:53 crc kubenswrapper[4631]: E1204 18:04:53.628184 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0219c809b99125f650034213c0c83268db88c5166ed97dd54642b9f413cc9de\": container with ID starting with e0219c809b99125f650034213c0c83268db88c5166ed97dd54642b9f413cc9de not found: ID does not exist" containerID="e0219c809b99125f650034213c0c83268db88c5166ed97dd54642b9f413cc9de" Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.628208 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0219c809b99125f650034213c0c83268db88c5166ed97dd54642b9f413cc9de"} err="failed to get container status \"e0219c809b99125f650034213c0c83268db88c5166ed97dd54642b9f413cc9de\": rpc error: code = NotFound desc = could not find container \"e0219c809b99125f650034213c0c83268db88c5166ed97dd54642b9f413cc9de\": container with ID starting with e0219c809b99125f650034213c0c83268db88c5166ed97dd54642b9f413cc9de not found: ID does not exist" Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.628222 4631 scope.go:117] "RemoveContainer" containerID="bc6ab5b229bf64f61539dc79aa679dda877fd845bc8fa6a66c5fda83bb1da509" Dec 04 18:04:53 crc kubenswrapper[4631]: E1204 18:04:53.628444 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc6ab5b229bf64f61539dc79aa679dda877fd845bc8fa6a66c5fda83bb1da509\": container with ID starting with bc6ab5b229bf64f61539dc79aa679dda877fd845bc8fa6a66c5fda83bb1da509 not found: ID does not exist" containerID="bc6ab5b229bf64f61539dc79aa679dda877fd845bc8fa6a66c5fda83bb1da509" Dec 04 18:04:53 crc kubenswrapper[4631]: I1204 18:04:53.628500 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc6ab5b229bf64f61539dc79aa679dda877fd845bc8fa6a66c5fda83bb1da509"} err="failed to get container status \"bc6ab5b229bf64f61539dc79aa679dda877fd845bc8fa6a66c5fda83bb1da509\": rpc error: code = NotFound desc = could not find container \"bc6ab5b229bf64f61539dc79aa679dda877fd845bc8fa6a66c5fda83bb1da509\": container with ID starting with bc6ab5b229bf64f61539dc79aa679dda877fd845bc8fa6a66c5fda83bb1da509 not found: ID does not exist" Dec 04 18:04:54 crc kubenswrapper[4631]: I1204 18:04:54.253075 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84cc41dd-a1c9-44fa-b297-4cf783316e2a" path="/var/lib/kubelet/pods/84cc41dd-a1c9-44fa-b297-4cf783316e2a/volumes" Dec 04 18:05:06 crc kubenswrapper[4631]: I1204 18:05:06.022818 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:05:06 crc kubenswrapper[4631]: I1204 18:05:06.023362 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" 
podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 18:05:06 crc kubenswrapper[4631]: I1204 18:05:06.023418 4631 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 18:05:06 crc kubenswrapper[4631]: I1204 18:05:06.024094 4631 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6"} pod="openshift-machine-config-operator/machine-config-daemon-q27wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 18:05:06 crc kubenswrapper[4631]: I1204 18:05:06.024162 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" containerID="cri-o://f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" gracePeriod=600 Dec 04 18:05:06 crc kubenswrapper[4631]: E1204 18:05:06.141850 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:05:06 crc kubenswrapper[4631]: I1204 18:05:06.645236 4631 generic.go:334] "Generic (PLEG): container finished" podID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" exitCode=0 Dec 04 18:05:06 crc kubenswrapper[4631]: I1204 18:05:06.645295 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerDied","Data":"f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6"} Dec 04 18:05:06 crc kubenswrapper[4631]: I1204 18:05:06.645728 4631 scope.go:117] "RemoveContainer" containerID="93cd7a21a80bbde50526477519b7b8631d88aed11055adc56d8ec73b1d4639e3" Dec 04 18:05:06 crc kubenswrapper[4631]: I1204 18:05:06.649454 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:05:06 crc kubenswrapper[4631]: E1204 18:05:06.650126 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:05:22 crc kubenswrapper[4631]: I1204 18:05:22.239241 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:05:22 crc kubenswrapper[4631]: E1204 18:05:22.240058 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:05:32 crc kubenswrapper[4631]: I1204 18:05:32.868334 4631 generic.go:334] "Generic (PLEG): container finished" podID="c303f9ff-2337-47a3-8e07-4ace557cc99a" containerID="1ea67aa44cfa5b313d8d0ac7899352f4ae7d44b75c14189cec9b144782a9517b" exitCode=0 Dec 04 18:05:32 crc kubenswrapper[4631]: I1204 18:05:32.868400 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s" event={"ID":"c303f9ff-2337-47a3-8e07-4ace557cc99a","Type":"ContainerDied","Data":"1ea67aa44cfa5b313d8d0ac7899352f4ae7d44b75c14189cec9b144782a9517b"} Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.280100 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s" Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.459781 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c303f9ff-2337-47a3-8e07-4ace557cc99a-inventory\") pod \"c303f9ff-2337-47a3-8e07-4ace557cc99a\" (UID: \"c303f9ff-2337-47a3-8e07-4ace557cc99a\") " Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.460257 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kjbx4\" (UniqueName: \"kubernetes.io/projected/c303f9ff-2337-47a3-8e07-4ace557cc99a-kube-api-access-kjbx4\") pod \"c303f9ff-2337-47a3-8e07-4ace557cc99a\" (UID: \"c303f9ff-2337-47a3-8e07-4ace557cc99a\") " Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.460471 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c303f9ff-2337-47a3-8e07-4ace557cc99a-ssh-key\") pod \"c303f9ff-2337-47a3-8e07-4ace557cc99a\" (UID: \"c303f9ff-2337-47a3-8e07-4ace557cc99a\") " Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.467500 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c303f9ff-2337-47a3-8e07-4ace557cc99a-kube-api-access-kjbx4" (OuterVolumeSpecName: "kube-api-access-kjbx4") pod "c303f9ff-2337-47a3-8e07-4ace557cc99a" (UID: "c303f9ff-2337-47a3-8e07-4ace557cc99a"). InnerVolumeSpecName "kube-api-access-kjbx4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.491422 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c303f9ff-2337-47a3-8e07-4ace557cc99a-inventory" (OuterVolumeSpecName: "inventory") pod "c303f9ff-2337-47a3-8e07-4ace557cc99a" (UID: "c303f9ff-2337-47a3-8e07-4ace557cc99a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.491923 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c303f9ff-2337-47a3-8e07-4ace557cc99a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c303f9ff-2337-47a3-8e07-4ace557cc99a" (UID: "c303f9ff-2337-47a3-8e07-4ace557cc99a"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.562505 4631 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c303f9ff-2337-47a3-8e07-4ace557cc99a-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.562537 4631 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c303f9ff-2337-47a3-8e07-4ace557cc99a-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.562552 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kjbx4\" (UniqueName: \"kubernetes.io/projected/c303f9ff-2337-47a3-8e07-4ace557cc99a-kube-api-access-kjbx4\") on node \"crc\" DevicePath \"\"" Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.887269 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s" event={"ID":"c303f9ff-2337-47a3-8e07-4ace557cc99a","Type":"ContainerDied","Data":"e03ce6d9b709606b71b41c6905bc42c8f34dbf0771814f3a0a677c082fd40eab"} Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.887313 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e03ce6d9b709606b71b41c6905bc42c8f34dbf0771814f3a0a677c082fd40eab" Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.887337 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s" Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.966113 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-8c42n"] Dec 04 18:05:34 crc kubenswrapper[4631]: E1204 18:05:34.966487 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84cc41dd-a1c9-44fa-b297-4cf783316e2a" containerName="registry-server" Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.966507 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="84cc41dd-a1c9-44fa-b297-4cf783316e2a" containerName="registry-server" Dec 04 18:05:34 crc kubenswrapper[4631]: E1204 18:05:34.966523 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84cc41dd-a1c9-44fa-b297-4cf783316e2a" containerName="extract-content" Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.966529 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="84cc41dd-a1c9-44fa-b297-4cf783316e2a" containerName="extract-content" Dec 04 18:05:34 crc kubenswrapper[4631]: E1204 18:05:34.966541 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c303f9ff-2337-47a3-8e07-4ace557cc99a" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.966548 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="c303f9ff-2337-47a3-8e07-4ace557cc99a" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 04 18:05:34 crc kubenswrapper[4631]: E1204 18:05:34.966568 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84cc41dd-a1c9-44fa-b297-4cf783316e2a" containerName="extract-utilities" Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.966576 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="84cc41dd-a1c9-44fa-b297-4cf783316e2a" containerName="extract-utilities" Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.966767 4631 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="c303f9ff-2337-47a3-8e07-4ace557cc99a" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.966785 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="84cc41dd-a1c9-44fa-b297-4cf783316e2a" containerName="registry-server" Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.967348 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-8c42n" Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.969437 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.969511 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.970176 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-9284p" Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.971334 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 18:05:34 crc kubenswrapper[4631]: I1204 18:05:34.986796 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-8c42n"] Dec 04 18:05:35 crc kubenswrapper[4631]: I1204 18:05:35.071679 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/6d2a06a0-e76d-469a-bf34-4d32dd8b0b84-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-8c42n\" (UID: \"6d2a06a0-e76d-469a-bf34-4d32dd8b0b84\") " pod="openstack/ssh-known-hosts-edpm-deployment-8c42n" Dec 04 18:05:35 crc kubenswrapper[4631]: I1204 18:05:35.071839 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/6d2a06a0-e76d-469a-bf34-4d32dd8b0b84-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-8c42n\" (UID: \"6d2a06a0-e76d-469a-bf34-4d32dd8b0b84\") " pod="openstack/ssh-known-hosts-edpm-deployment-8c42n" Dec 04 18:05:35 crc kubenswrapper[4631]: I1204 18:05:35.072075 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtd98\" (UniqueName: \"kubernetes.io/projected/6d2a06a0-e76d-469a-bf34-4d32dd8b0b84-kube-api-access-rtd98\") pod \"ssh-known-hosts-edpm-deployment-8c42n\" (UID: \"6d2a06a0-e76d-469a-bf34-4d32dd8b0b84\") " pod="openstack/ssh-known-hosts-edpm-deployment-8c42n" Dec 04 18:05:35 crc kubenswrapper[4631]: I1204 18:05:35.173846 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/6d2a06a0-e76d-469a-bf34-4d32dd8b0b84-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-8c42n\" (UID: \"6d2a06a0-e76d-469a-bf34-4d32dd8b0b84\") " pod="openstack/ssh-known-hosts-edpm-deployment-8c42n" Dec 04 18:05:35 crc kubenswrapper[4631]: I1204 18:05:35.173966 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/6d2a06a0-e76d-469a-bf34-4d32dd8b0b84-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-8c42n\" (UID: \"6d2a06a0-e76d-469a-bf34-4d32dd8b0b84\") " pod="openstack/ssh-known-hosts-edpm-deployment-8c42n" Dec 
04 18:05:35 crc kubenswrapper[4631]: I1204 18:05:35.174053 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtd98\" (UniqueName: \"kubernetes.io/projected/6d2a06a0-e76d-469a-bf34-4d32dd8b0b84-kube-api-access-rtd98\") pod \"ssh-known-hosts-edpm-deployment-8c42n\" (UID: \"6d2a06a0-e76d-469a-bf34-4d32dd8b0b84\") " pod="openstack/ssh-known-hosts-edpm-deployment-8c42n" Dec 04 18:05:35 crc kubenswrapper[4631]: I1204 18:05:35.179106 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/6d2a06a0-e76d-469a-bf34-4d32dd8b0b84-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-8c42n\" (UID: \"6d2a06a0-e76d-469a-bf34-4d32dd8b0b84\") " pod="openstack/ssh-known-hosts-edpm-deployment-8c42n" Dec 04 18:05:35 crc kubenswrapper[4631]: I1204 18:05:35.179176 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/6d2a06a0-e76d-469a-bf34-4d32dd8b0b84-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-8c42n\" (UID: \"6d2a06a0-e76d-469a-bf34-4d32dd8b0b84\") " pod="openstack/ssh-known-hosts-edpm-deployment-8c42n" Dec 04 18:05:35 crc kubenswrapper[4631]: I1204 18:05:35.195012 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtd98\" (UniqueName: \"kubernetes.io/projected/6d2a06a0-e76d-469a-bf34-4d32dd8b0b84-kube-api-access-rtd98\") pod \"ssh-known-hosts-edpm-deployment-8c42n\" (UID: \"6d2a06a0-e76d-469a-bf34-4d32dd8b0b84\") " pod="openstack/ssh-known-hosts-edpm-deployment-8c42n" Dec 04 18:05:35 crc kubenswrapper[4631]: I1204 18:05:35.286618 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-8c42n" Dec 04 18:05:35 crc kubenswrapper[4631]: I1204 18:05:35.851102 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-8c42n"] Dec 04 18:05:35 crc kubenswrapper[4631]: I1204 18:05:35.855220 4631 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 18:05:35 crc kubenswrapper[4631]: I1204 18:05:35.895806 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-8c42n" event={"ID":"6d2a06a0-e76d-469a-bf34-4d32dd8b0b84","Type":"ContainerStarted","Data":"52c9767d3e48df3c6a5c82af513a4962e55d3c6a64f259bed19ddb8d1bd24002"} Dec 04 18:05:36 crc kubenswrapper[4631]: I1204 18:05:36.907024 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-8c42n" event={"ID":"6d2a06a0-e76d-469a-bf34-4d32dd8b0b84","Type":"ContainerStarted","Data":"321d270c6e7502bb47e93f832b5eb6be91014783dadeb06989572146767342d1"} Dec 04 18:05:37 crc kubenswrapper[4631]: I1204 18:05:37.239733 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:05:37 crc kubenswrapper[4631]: E1204 18:05:37.239984 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:05:44 crc kubenswrapper[4631]: I1204 18:05:44.978434 
4631 generic.go:334] "Generic (PLEG): container finished" podID="6d2a06a0-e76d-469a-bf34-4d32dd8b0b84" containerID="321d270c6e7502bb47e93f832b5eb6be91014783dadeb06989572146767342d1" exitCode=0 Dec 04 18:05:44 crc kubenswrapper[4631]: I1204 18:05:44.978489 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-8c42n" event={"ID":"6d2a06a0-e76d-469a-bf34-4d32dd8b0b84","Type":"ContainerDied","Data":"321d270c6e7502bb47e93f832b5eb6be91014783dadeb06989572146767342d1"} Dec 04 18:05:46 crc kubenswrapper[4631]: I1204 18:05:46.414096 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-8c42n" Dec 04 18:05:46 crc kubenswrapper[4631]: I1204 18:05:46.501833 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/6d2a06a0-e76d-469a-bf34-4d32dd8b0b84-inventory-0\") pod \"6d2a06a0-e76d-469a-bf34-4d32dd8b0b84\" (UID: \"6d2a06a0-e76d-469a-bf34-4d32dd8b0b84\") " Dec 04 18:05:46 crc kubenswrapper[4631]: I1204 18:05:46.501888 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/6d2a06a0-e76d-469a-bf34-4d32dd8b0b84-ssh-key-openstack-edpm-ipam\") pod \"6d2a06a0-e76d-469a-bf34-4d32dd8b0b84\" (UID: \"6d2a06a0-e76d-469a-bf34-4d32dd8b0b84\") " Dec 04 18:05:46 crc kubenswrapper[4631]: I1204 18:05:46.502017 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rtd98\" (UniqueName: \"kubernetes.io/projected/6d2a06a0-e76d-469a-bf34-4d32dd8b0b84-kube-api-access-rtd98\") pod \"6d2a06a0-e76d-469a-bf34-4d32dd8b0b84\" (UID: \"6d2a06a0-e76d-469a-bf34-4d32dd8b0b84\") " Dec 04 18:05:46 crc kubenswrapper[4631]: I1204 18:05:46.506951 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d2a06a0-e76d-469a-bf34-4d32dd8b0b84-kube-api-access-rtd98" (OuterVolumeSpecName: "kube-api-access-rtd98") pod "6d2a06a0-e76d-469a-bf34-4d32dd8b0b84" (UID: "6d2a06a0-e76d-469a-bf34-4d32dd8b0b84"). InnerVolumeSpecName "kube-api-access-rtd98". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:05:46 crc kubenswrapper[4631]: I1204 18:05:46.530298 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d2a06a0-e76d-469a-bf34-4d32dd8b0b84-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "6d2a06a0-e76d-469a-bf34-4d32dd8b0b84" (UID: "6d2a06a0-e76d-469a-bf34-4d32dd8b0b84"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:05:46 crc kubenswrapper[4631]: I1204 18:05:46.535000 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d2a06a0-e76d-469a-bf34-4d32dd8b0b84-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "6d2a06a0-e76d-469a-bf34-4d32dd8b0b84" (UID: "6d2a06a0-e76d-469a-bf34-4d32dd8b0b84"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:05:46 crc kubenswrapper[4631]: I1204 18:05:46.604497 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rtd98\" (UniqueName: \"kubernetes.io/projected/6d2a06a0-e76d-469a-bf34-4d32dd8b0b84-kube-api-access-rtd98\") on node \"crc\" DevicePath \"\"" Dec 04 18:05:46 crc kubenswrapper[4631]: I1204 18:05:46.604531 4631 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/6d2a06a0-e76d-469a-bf34-4d32dd8b0b84-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 04 18:05:46 crc kubenswrapper[4631]: I1204 18:05:46.604541 4631 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/6d2a06a0-e76d-469a-bf34-4d32dd8b0b84-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 04 18:05:46 crc kubenswrapper[4631]: I1204 18:05:46.994476 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-8c42n" event={"ID":"6d2a06a0-e76d-469a-bf34-4d32dd8b0b84","Type":"ContainerDied","Data":"52c9767d3e48df3c6a5c82af513a4962e55d3c6a64f259bed19ddb8d1bd24002"} Dec 04 18:05:46 crc kubenswrapper[4631]: I1204 18:05:46.994911 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52c9767d3e48df3c6a5c82af513a4962e55d3c6a64f259bed19ddb8d1bd24002" Dec 04 18:05:46 crc kubenswrapper[4631]: I1204 18:05:46.994551 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-8c42n" Dec 04 18:05:47 crc kubenswrapper[4631]: I1204 18:05:47.139122 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-p2h9d"] Dec 04 18:05:47 crc kubenswrapper[4631]: E1204 18:05:47.139517 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d2a06a0-e76d-469a-bf34-4d32dd8b0b84" containerName="ssh-known-hosts-edpm-deployment" Dec 04 18:05:47 crc kubenswrapper[4631]: I1204 18:05:47.139535 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d2a06a0-e76d-469a-bf34-4d32dd8b0b84" containerName="ssh-known-hosts-edpm-deployment" Dec 04 18:05:47 crc kubenswrapper[4631]: I1204 18:05:47.139735 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d2a06a0-e76d-469a-bf34-4d32dd8b0b84" containerName="ssh-known-hosts-edpm-deployment" Dec 04 18:05:47 crc kubenswrapper[4631]: I1204 18:05:47.140295 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-p2h9d" Dec 04 18:05:47 crc kubenswrapper[4631]: I1204 18:05:47.150076 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-p2h9d"] Dec 04 18:05:47 crc kubenswrapper[4631]: I1204 18:05:47.150600 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 18:05:47 crc kubenswrapper[4631]: I1204 18:05:47.151019 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 18:05:47 crc kubenswrapper[4631]: I1204 18:05:47.151646 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-9284p" Dec 04 18:05:47 crc kubenswrapper[4631]: I1204 18:05:47.151872 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 18:05:47 crc kubenswrapper[4631]: I1204 18:05:47.212093 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c331e4d1-1da9-4a7f-bd67-f24a4c76b971-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-p2h9d\" (UID: \"c331e4d1-1da9-4a7f-bd67-f24a4c76b971\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-p2h9d" Dec 04 18:05:47 crc kubenswrapper[4631]: I1204 18:05:47.212342 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4m7v8\" (UniqueName: \"kubernetes.io/projected/c331e4d1-1da9-4a7f-bd67-f24a4c76b971-kube-api-access-4m7v8\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-p2h9d\" (UID: \"c331e4d1-1da9-4a7f-bd67-f24a4c76b971\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-p2h9d" Dec 04 18:05:47 crc kubenswrapper[4631]: I1204 18:05:47.212485 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c331e4d1-1da9-4a7f-bd67-f24a4c76b971-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-p2h9d\" (UID: \"c331e4d1-1da9-4a7f-bd67-f24a4c76b971\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-p2h9d" Dec 04 18:05:47 crc kubenswrapper[4631]: I1204 18:05:47.313552 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c331e4d1-1da9-4a7f-bd67-f24a4c76b971-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-p2h9d\" (UID: \"c331e4d1-1da9-4a7f-bd67-f24a4c76b971\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-p2h9d" Dec 04 18:05:47 crc kubenswrapper[4631]: I1204 18:05:47.313594 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4m7v8\" (UniqueName: \"kubernetes.io/projected/c331e4d1-1da9-4a7f-bd67-f24a4c76b971-kube-api-access-4m7v8\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-p2h9d\" (UID: \"c331e4d1-1da9-4a7f-bd67-f24a4c76b971\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-p2h9d" Dec 04 18:05:47 crc kubenswrapper[4631]: I1204 18:05:47.313646 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c331e4d1-1da9-4a7f-bd67-f24a4c76b971-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-p2h9d\" (UID: \"c331e4d1-1da9-4a7f-bd67-f24a4c76b971\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-p2h9d" Dec 04 18:05:47 crc kubenswrapper[4631]: I1204 18:05:47.320222 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c331e4d1-1da9-4a7f-bd67-f24a4c76b971-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-p2h9d\" (UID: \"c331e4d1-1da9-4a7f-bd67-f24a4c76b971\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-p2h9d" Dec 04 18:05:47 crc kubenswrapper[4631]: I1204 18:05:47.320775 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c331e4d1-1da9-4a7f-bd67-f24a4c76b971-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-p2h9d\" (UID: \"c331e4d1-1da9-4a7f-bd67-f24a4c76b971\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-p2h9d" Dec 04 18:05:47 crc kubenswrapper[4631]: I1204 18:05:47.331959 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4m7v8\" (UniqueName: \"kubernetes.io/projected/c331e4d1-1da9-4a7f-bd67-f24a4c76b971-kube-api-access-4m7v8\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-p2h9d\" (UID: \"c331e4d1-1da9-4a7f-bd67-f24a4c76b971\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-p2h9d" Dec 04 18:05:47 crc kubenswrapper[4631]: I1204 18:05:47.460252 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-p2h9d" Dec 04 18:05:47 crc kubenswrapper[4631]: I1204 18:05:47.990813 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-p2h9d"] Dec 04 18:05:48 crc kubenswrapper[4631]: I1204 18:05:48.010529 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-p2h9d" event={"ID":"c331e4d1-1da9-4a7f-bd67-f24a4c76b971","Type":"ContainerStarted","Data":"bdea94f10fefbba23b7f28d7ce417addf242acc18bfab1c2c3c1c8f957faafbf"} Dec 04 18:05:48 crc kubenswrapper[4631]: I1204 18:05:48.140567 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-v8kc5"] Dec 04 18:05:48 crc kubenswrapper[4631]: I1204 18:05:48.142796 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-v8kc5" Dec 04 18:05:48 crc kubenswrapper[4631]: I1204 18:05:48.151596 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v8kc5"] Dec 04 18:05:48 crc kubenswrapper[4631]: I1204 18:05:48.230500 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f175691-4966-4d0a-8a3a-418859733db9-utilities\") pod \"redhat-operators-v8kc5\" (UID: \"2f175691-4966-4d0a-8a3a-418859733db9\") " pod="openshift-marketplace/redhat-operators-v8kc5" Dec 04 18:05:48 crc kubenswrapper[4631]: I1204 18:05:48.231880 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f175691-4966-4d0a-8a3a-418859733db9-catalog-content\") pod \"redhat-operators-v8kc5\" (UID: \"2f175691-4966-4d0a-8a3a-418859733db9\") " pod="openshift-marketplace/redhat-operators-v8kc5" Dec 04 18:05:48 crc kubenswrapper[4631]: I1204 18:05:48.232301 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h95b9\" (UniqueName: \"kubernetes.io/projected/2f175691-4966-4d0a-8a3a-418859733db9-kube-api-access-h95b9\") pod \"redhat-operators-v8kc5\" (UID: \"2f175691-4966-4d0a-8a3a-418859733db9\") " pod="openshift-marketplace/redhat-operators-v8kc5" Dec 04 18:05:48 crc kubenswrapper[4631]: I1204 18:05:48.333228 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f175691-4966-4d0a-8a3a-418859733db9-catalog-content\") pod \"redhat-operators-v8kc5\" (UID: \"2f175691-4966-4d0a-8a3a-418859733db9\") " pod="openshift-marketplace/redhat-operators-v8kc5" Dec 04 18:05:48 crc kubenswrapper[4631]: I1204 18:05:48.333352 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h95b9\" (UniqueName: \"kubernetes.io/projected/2f175691-4966-4d0a-8a3a-418859733db9-kube-api-access-h95b9\") pod \"redhat-operators-v8kc5\" (UID: \"2f175691-4966-4d0a-8a3a-418859733db9\") " pod="openshift-marketplace/redhat-operators-v8kc5" Dec 04 18:05:48 crc kubenswrapper[4631]: I1204 18:05:48.333425 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f175691-4966-4d0a-8a3a-418859733db9-utilities\") pod \"redhat-operators-v8kc5\" (UID: \"2f175691-4966-4d0a-8a3a-418859733db9\") " pod="openshift-marketplace/redhat-operators-v8kc5" Dec 04 18:05:48 crc kubenswrapper[4631]: I1204 18:05:48.333824 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f175691-4966-4d0a-8a3a-418859733db9-catalog-content\") pod \"redhat-operators-v8kc5\" (UID: \"2f175691-4966-4d0a-8a3a-418859733db9\") " pod="openshift-marketplace/redhat-operators-v8kc5" Dec 04 18:05:48 crc kubenswrapper[4631]: I1204 18:05:48.333970 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f175691-4966-4d0a-8a3a-418859733db9-utilities\") pod \"redhat-operators-v8kc5\" (UID: \"2f175691-4966-4d0a-8a3a-418859733db9\") " pod="openshift-marketplace/redhat-operators-v8kc5" Dec 04 18:05:48 crc kubenswrapper[4631]: I1204 18:05:48.365425 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-h95b9\" (UniqueName: \"kubernetes.io/projected/2f175691-4966-4d0a-8a3a-418859733db9-kube-api-access-h95b9\") pod \"redhat-operators-v8kc5\" (UID: \"2f175691-4966-4d0a-8a3a-418859733db9\") " pod="openshift-marketplace/redhat-operators-v8kc5" Dec 04 18:05:48 crc kubenswrapper[4631]: I1204 18:05:48.466917 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v8kc5" Dec 04 18:05:49 crc kubenswrapper[4631]: I1204 18:05:49.042453 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-p2h9d" event={"ID":"c331e4d1-1da9-4a7f-bd67-f24a4c76b971","Type":"ContainerStarted","Data":"5bee0f4d9a201e96b9b3631b0e2c28bb4e50e71f06cc027a8fe73e715139497f"} Dec 04 18:05:49 crc kubenswrapper[4631]: I1204 18:05:49.075557 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v8kc5"] Dec 04 18:05:49 crc kubenswrapper[4631]: I1204 18:05:49.083469 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-p2h9d" podStartSLOduration=1.914888119 podStartE2EDuration="2.08344605s" podCreationTimestamp="2025-12-04 18:05:47 +0000 UTC" firstStartedPulling="2025-12-04 18:05:47.998128515 +0000 UTC m=+2278.030370513" lastFinishedPulling="2025-12-04 18:05:48.166686446 +0000 UTC m=+2278.198928444" observedRunningTime="2025-12-04 18:05:49.063279641 +0000 UTC m=+2279.095521639" watchObservedRunningTime="2025-12-04 18:05:49.08344605 +0000 UTC m=+2279.115688048" Dec 04 18:05:50 crc kubenswrapper[4631]: I1204 18:05:50.053708 4631 generic.go:334] "Generic (PLEG): container finished" podID="2f175691-4966-4d0a-8a3a-418859733db9" containerID="0c703758d186a4006ffa209285a98d25878f7d490d875dd0ac4382f40abc5d1f" exitCode=0 Dec 04 18:05:50 crc kubenswrapper[4631]: I1204 18:05:50.053747 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8kc5" event={"ID":"2f175691-4966-4d0a-8a3a-418859733db9","Type":"ContainerDied","Data":"0c703758d186a4006ffa209285a98d25878f7d490d875dd0ac4382f40abc5d1f"} Dec 04 18:05:50 crc kubenswrapper[4631]: I1204 18:05:50.054038 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8kc5" event={"ID":"2f175691-4966-4d0a-8a3a-418859733db9","Type":"ContainerStarted","Data":"25328f8dc3dd3b5613a34e38d3904648bfe0b5a0a05ce6ef689ae54eb88ff0b8"} Dec 04 18:05:51 crc kubenswrapper[4631]: I1204 18:05:51.064668 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8kc5" event={"ID":"2f175691-4966-4d0a-8a3a-418859733db9","Type":"ContainerStarted","Data":"0a686de84215d9004e404ae1204ac4599a4b782d52ab3e4d60a2c4151c21fbb6"} Dec 04 18:05:52 crc kubenswrapper[4631]: I1204 18:05:52.240182 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:05:52 crc kubenswrapper[4631]: E1204 18:05:52.241344 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:05:55 crc kubenswrapper[4631]: I1204 18:05:55.101786 4631 
generic.go:334] "Generic (PLEG): container finished" podID="2f175691-4966-4d0a-8a3a-418859733db9" containerID="0a686de84215d9004e404ae1204ac4599a4b782d52ab3e4d60a2c4151c21fbb6" exitCode=0 Dec 04 18:05:55 crc kubenswrapper[4631]: I1204 18:05:55.101892 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8kc5" event={"ID":"2f175691-4966-4d0a-8a3a-418859733db9","Type":"ContainerDied","Data":"0a686de84215d9004e404ae1204ac4599a4b782d52ab3e4d60a2c4151c21fbb6"} Dec 04 18:05:56 crc kubenswrapper[4631]: I1204 18:05:56.111442 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8kc5" event={"ID":"2f175691-4966-4d0a-8a3a-418859733db9","Type":"ContainerStarted","Data":"5de268cd134865480140bcad4d15948d37c5edcb691727b50311b8cd3e7d8363"} Dec 04 18:05:56 crc kubenswrapper[4631]: I1204 18:05:56.127685 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-v8kc5" podStartSLOduration=2.642384481 podStartE2EDuration="8.127655965s" podCreationTimestamp="2025-12-04 18:05:48 +0000 UTC" firstStartedPulling="2025-12-04 18:05:50.055983782 +0000 UTC m=+2280.088225780" lastFinishedPulling="2025-12-04 18:05:55.541255266 +0000 UTC m=+2285.573497264" observedRunningTime="2025-12-04 18:05:56.127502211 +0000 UTC m=+2286.159744209" watchObservedRunningTime="2025-12-04 18:05:56.127655965 +0000 UTC m=+2286.159897963" Dec 04 18:05:58 crc kubenswrapper[4631]: I1204 18:05:58.128330 4631 generic.go:334] "Generic (PLEG): container finished" podID="c331e4d1-1da9-4a7f-bd67-f24a4c76b971" containerID="5bee0f4d9a201e96b9b3631b0e2c28bb4e50e71f06cc027a8fe73e715139497f" exitCode=0 Dec 04 18:05:58 crc kubenswrapper[4631]: I1204 18:05:58.128410 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-p2h9d" event={"ID":"c331e4d1-1da9-4a7f-bd67-f24a4c76b971","Type":"ContainerDied","Data":"5bee0f4d9a201e96b9b3631b0e2c28bb4e50e71f06cc027a8fe73e715139497f"} Dec 04 18:05:58 crc kubenswrapper[4631]: I1204 18:05:58.467645 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-v8kc5" Dec 04 18:05:58 crc kubenswrapper[4631]: I1204 18:05:58.467688 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-v8kc5" Dec 04 18:05:59 crc kubenswrapper[4631]: I1204 18:05:59.508678 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-v8kc5" podUID="2f175691-4966-4d0a-8a3a-418859733db9" containerName="registry-server" probeResult="failure" output=< Dec 04 18:05:59 crc kubenswrapper[4631]: timeout: failed to connect service ":50051" within 1s Dec 04 18:05:59 crc kubenswrapper[4631]: > Dec 04 18:05:59 crc kubenswrapper[4631]: I1204 18:05:59.546728 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-p2h9d" Dec 04 18:05:59 crc kubenswrapper[4631]: I1204 18:05:59.579713 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4m7v8\" (UniqueName: \"kubernetes.io/projected/c331e4d1-1da9-4a7f-bd67-f24a4c76b971-kube-api-access-4m7v8\") pod \"c331e4d1-1da9-4a7f-bd67-f24a4c76b971\" (UID: \"c331e4d1-1da9-4a7f-bd67-f24a4c76b971\") " Dec 04 18:05:59 crc kubenswrapper[4631]: I1204 18:05:59.579805 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c331e4d1-1da9-4a7f-bd67-f24a4c76b971-ssh-key\") pod \"c331e4d1-1da9-4a7f-bd67-f24a4c76b971\" (UID: \"c331e4d1-1da9-4a7f-bd67-f24a4c76b971\") " Dec 04 18:05:59 crc kubenswrapper[4631]: I1204 18:05:59.579857 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c331e4d1-1da9-4a7f-bd67-f24a4c76b971-inventory\") pod \"c331e4d1-1da9-4a7f-bd67-f24a4c76b971\" (UID: \"c331e4d1-1da9-4a7f-bd67-f24a4c76b971\") " Dec 04 18:05:59 crc kubenswrapper[4631]: I1204 18:05:59.585526 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c331e4d1-1da9-4a7f-bd67-f24a4c76b971-kube-api-access-4m7v8" (OuterVolumeSpecName: "kube-api-access-4m7v8") pod "c331e4d1-1da9-4a7f-bd67-f24a4c76b971" (UID: "c331e4d1-1da9-4a7f-bd67-f24a4c76b971"). InnerVolumeSpecName "kube-api-access-4m7v8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:05:59 crc kubenswrapper[4631]: I1204 18:05:59.613799 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c331e4d1-1da9-4a7f-bd67-f24a4c76b971-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c331e4d1-1da9-4a7f-bd67-f24a4c76b971" (UID: "c331e4d1-1da9-4a7f-bd67-f24a4c76b971"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:05:59 crc kubenswrapper[4631]: I1204 18:05:59.626965 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c331e4d1-1da9-4a7f-bd67-f24a4c76b971-inventory" (OuterVolumeSpecName: "inventory") pod "c331e4d1-1da9-4a7f-bd67-f24a4c76b971" (UID: "c331e4d1-1da9-4a7f-bd67-f24a4c76b971"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:05:59 crc kubenswrapper[4631]: I1204 18:05:59.681914 4631 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c331e4d1-1da9-4a7f-bd67-f24a4c76b971-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 18:05:59 crc kubenswrapper[4631]: I1204 18:05:59.681946 4631 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c331e4d1-1da9-4a7f-bd67-f24a4c76b971-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 18:05:59 crc kubenswrapper[4631]: I1204 18:05:59.681958 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4m7v8\" (UniqueName: \"kubernetes.io/projected/c331e4d1-1da9-4a7f-bd67-f24a4c76b971-kube-api-access-4m7v8\") on node \"crc\" DevicePath \"\"" Dec 04 18:06:00 crc kubenswrapper[4631]: I1204 18:06:00.146615 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-p2h9d" event={"ID":"c331e4d1-1da9-4a7f-bd67-f24a4c76b971","Type":"ContainerDied","Data":"bdea94f10fefbba23b7f28d7ce417addf242acc18bfab1c2c3c1c8f957faafbf"} Dec 04 18:06:00 crc kubenswrapper[4631]: I1204 18:06:00.147026 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bdea94f10fefbba23b7f28d7ce417addf242acc18bfab1c2c3c1c8f957faafbf" Dec 04 18:06:00 crc kubenswrapper[4631]: I1204 18:06:00.146667 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-p2h9d" Dec 04 18:06:00 crc kubenswrapper[4631]: I1204 18:06:00.232678 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj"] Dec 04 18:06:00 crc kubenswrapper[4631]: E1204 18:06:00.233148 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c331e4d1-1da9-4a7f-bd67-f24a4c76b971" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 04 18:06:00 crc kubenswrapper[4631]: I1204 18:06:00.233175 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="c331e4d1-1da9-4a7f-bd67-f24a4c76b971" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 04 18:06:00 crc kubenswrapper[4631]: I1204 18:06:00.233493 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="c331e4d1-1da9-4a7f-bd67-f24a4c76b971" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 04 18:06:00 crc kubenswrapper[4631]: I1204 18:06:00.234239 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj" Dec 04 18:06:00 crc kubenswrapper[4631]: I1204 18:06:00.244812 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 18:06:00 crc kubenswrapper[4631]: I1204 18:06:00.245035 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 18:06:00 crc kubenswrapper[4631]: I1204 18:06:00.245173 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-9284p" Dec 04 18:06:00 crc kubenswrapper[4631]: I1204 18:06:00.245234 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 18:06:00 crc kubenswrapper[4631]: I1204 18:06:00.253395 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj"] Dec 04 18:06:00 crc kubenswrapper[4631]: I1204 18:06:00.397259 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2ec68e5-0f90-46f3-b0f7-1fdc8956c306-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj\" (UID: \"c2ec68e5-0f90-46f3-b0f7-1fdc8956c306\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj" Dec 04 18:06:00 crc kubenswrapper[4631]: I1204 18:06:00.397435 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xf9vm\" (UniqueName: \"kubernetes.io/projected/c2ec68e5-0f90-46f3-b0f7-1fdc8956c306-kube-api-access-xf9vm\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj\" (UID: \"c2ec68e5-0f90-46f3-b0f7-1fdc8956c306\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj" Dec 04 18:06:00 crc kubenswrapper[4631]: I1204 18:06:00.397508 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2ec68e5-0f90-46f3-b0f7-1fdc8956c306-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj\" (UID: \"c2ec68e5-0f90-46f3-b0f7-1fdc8956c306\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj" Dec 04 18:06:00 crc kubenswrapper[4631]: I1204 18:06:00.499460 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xf9vm\" (UniqueName: \"kubernetes.io/projected/c2ec68e5-0f90-46f3-b0f7-1fdc8956c306-kube-api-access-xf9vm\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj\" (UID: \"c2ec68e5-0f90-46f3-b0f7-1fdc8956c306\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj" Dec 04 18:06:00 crc kubenswrapper[4631]: I1204 18:06:00.499827 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2ec68e5-0f90-46f3-b0f7-1fdc8956c306-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj\" (UID: \"c2ec68e5-0f90-46f3-b0f7-1fdc8956c306\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj" Dec 04 18:06:00 crc kubenswrapper[4631]: I1204 18:06:00.500123 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2ec68e5-0f90-46f3-b0f7-1fdc8956c306-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj\" (UID: 
\"c2ec68e5-0f90-46f3-b0f7-1fdc8956c306\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj" Dec 04 18:06:00 crc kubenswrapper[4631]: I1204 18:06:00.505354 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2ec68e5-0f90-46f3-b0f7-1fdc8956c306-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj\" (UID: \"c2ec68e5-0f90-46f3-b0f7-1fdc8956c306\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj" Dec 04 18:06:00 crc kubenswrapper[4631]: I1204 18:06:00.507910 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2ec68e5-0f90-46f3-b0f7-1fdc8956c306-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj\" (UID: \"c2ec68e5-0f90-46f3-b0f7-1fdc8956c306\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj" Dec 04 18:06:00 crc kubenswrapper[4631]: I1204 18:06:00.517743 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xf9vm\" (UniqueName: \"kubernetes.io/projected/c2ec68e5-0f90-46f3-b0f7-1fdc8956c306-kube-api-access-xf9vm\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj\" (UID: \"c2ec68e5-0f90-46f3-b0f7-1fdc8956c306\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj" Dec 04 18:06:00 crc kubenswrapper[4631]: I1204 18:06:00.556841 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj" Dec 04 18:06:01 crc kubenswrapper[4631]: I1204 18:06:01.084436 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj"] Dec 04 18:06:01 crc kubenswrapper[4631]: I1204 18:06:01.155865 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj" event={"ID":"c2ec68e5-0f90-46f3-b0f7-1fdc8956c306","Type":"ContainerStarted","Data":"b636a7b227b2ff02da38cb6be9e5ae4606ff938188ca95e26952b564025f885f"} Dec 04 18:06:02 crc kubenswrapper[4631]: I1204 18:06:02.163997 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj" event={"ID":"c2ec68e5-0f90-46f3-b0f7-1fdc8956c306","Type":"ContainerStarted","Data":"4d292fcc5b4a8cbcbbcf71d1a79dc5d3d83ee8dca23c0c08e00e40654b731426"} Dec 04 18:06:02 crc kubenswrapper[4631]: I1204 18:06:02.199005 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj" podStartSLOduration=2.040121771 podStartE2EDuration="2.198983208s" podCreationTimestamp="2025-12-04 18:06:00 +0000 UTC" firstStartedPulling="2025-12-04 18:06:01.092902539 +0000 UTC m=+2291.125144537" lastFinishedPulling="2025-12-04 18:06:01.251763976 +0000 UTC m=+2291.284005974" observedRunningTime="2025-12-04 18:06:02.181265131 +0000 UTC m=+2292.213507129" watchObservedRunningTime="2025-12-04 18:06:02.198983208 +0000 UTC m=+2292.231225206" Dec 04 18:06:07 crc kubenswrapper[4631]: I1204 18:06:07.240012 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:06:07 crc kubenswrapper[4631]: E1204 18:06:07.242493 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:06:08 crc kubenswrapper[4631]: I1204 18:06:08.514585 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-v8kc5" Dec 04 18:06:08 crc kubenswrapper[4631]: I1204 18:06:08.569784 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-v8kc5" Dec 04 18:06:08 crc kubenswrapper[4631]: I1204 18:06:08.752247 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v8kc5"] Dec 04 18:06:10 crc kubenswrapper[4631]: I1204 18:06:10.236076 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-v8kc5" podUID="2f175691-4966-4d0a-8a3a-418859733db9" containerName="registry-server" containerID="cri-o://5de268cd134865480140bcad4d15948d37c5edcb691727b50311b8cd3e7d8363" gracePeriod=2 Dec 04 18:06:10 crc kubenswrapper[4631]: I1204 18:06:10.678982 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v8kc5" Dec 04 18:06:10 crc kubenswrapper[4631]: I1204 18:06:10.688101 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h95b9\" (UniqueName: \"kubernetes.io/projected/2f175691-4966-4d0a-8a3a-418859733db9-kube-api-access-h95b9\") pod \"2f175691-4966-4d0a-8a3a-418859733db9\" (UID: \"2f175691-4966-4d0a-8a3a-418859733db9\") " Dec 04 18:06:10 crc kubenswrapper[4631]: I1204 18:06:10.688976 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f175691-4966-4d0a-8a3a-418859733db9-utilities\") pod \"2f175691-4966-4d0a-8a3a-418859733db9\" (UID: \"2f175691-4966-4d0a-8a3a-418859733db9\") " Dec 04 18:06:10 crc kubenswrapper[4631]: I1204 18:06:10.689188 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f175691-4966-4d0a-8a3a-418859733db9-catalog-content\") pod \"2f175691-4966-4d0a-8a3a-418859733db9\" (UID: \"2f175691-4966-4d0a-8a3a-418859733db9\") " Dec 04 18:06:10 crc kubenswrapper[4631]: I1204 18:06:10.690647 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f175691-4966-4d0a-8a3a-418859733db9-utilities" (OuterVolumeSpecName: "utilities") pod "2f175691-4966-4d0a-8a3a-418859733db9" (UID: "2f175691-4966-4d0a-8a3a-418859733db9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:06:10 crc kubenswrapper[4631]: I1204 18:06:10.693759 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f175691-4966-4d0a-8a3a-418859733db9-kube-api-access-h95b9" (OuterVolumeSpecName: "kube-api-access-h95b9") pod "2f175691-4966-4d0a-8a3a-418859733db9" (UID: "2f175691-4966-4d0a-8a3a-418859733db9"). InnerVolumeSpecName "kube-api-access-h95b9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:06:10 crc kubenswrapper[4631]: I1204 18:06:10.790825 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h95b9\" (UniqueName: \"kubernetes.io/projected/2f175691-4966-4d0a-8a3a-418859733db9-kube-api-access-h95b9\") on node \"crc\" DevicePath \"\"" Dec 04 18:06:10 crc kubenswrapper[4631]: I1204 18:06:10.791145 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f175691-4966-4d0a-8a3a-418859733db9-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 18:06:10 crc kubenswrapper[4631]: I1204 18:06:10.828317 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f175691-4966-4d0a-8a3a-418859733db9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2f175691-4966-4d0a-8a3a-418859733db9" (UID: "2f175691-4966-4d0a-8a3a-418859733db9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:06:10 crc kubenswrapper[4631]: I1204 18:06:10.892627 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f175691-4966-4d0a-8a3a-418859733db9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 18:06:11 crc kubenswrapper[4631]: I1204 18:06:11.248320 4631 generic.go:334] "Generic (PLEG): container finished" podID="2f175691-4966-4d0a-8a3a-418859733db9" containerID="5de268cd134865480140bcad4d15948d37c5edcb691727b50311b8cd3e7d8363" exitCode=0 Dec 04 18:06:11 crc kubenswrapper[4631]: I1204 18:06:11.248398 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8kc5" event={"ID":"2f175691-4966-4d0a-8a3a-418859733db9","Type":"ContainerDied","Data":"5de268cd134865480140bcad4d15948d37c5edcb691727b50311b8cd3e7d8363"} Dec 04 18:06:11 crc kubenswrapper[4631]: I1204 18:06:11.248429 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8kc5" event={"ID":"2f175691-4966-4d0a-8a3a-418859733db9","Type":"ContainerDied","Data":"25328f8dc3dd3b5613a34e38d3904648bfe0b5a0a05ce6ef689ae54eb88ff0b8"} Dec 04 18:06:11 crc kubenswrapper[4631]: I1204 18:06:11.248430 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-v8kc5" Dec 04 18:06:11 crc kubenswrapper[4631]: I1204 18:06:11.248449 4631 scope.go:117] "RemoveContainer" containerID="5de268cd134865480140bcad4d15948d37c5edcb691727b50311b8cd3e7d8363" Dec 04 18:06:11 crc kubenswrapper[4631]: I1204 18:06:11.272860 4631 scope.go:117] "RemoveContainer" containerID="0a686de84215d9004e404ae1204ac4599a4b782d52ab3e4d60a2c4151c21fbb6" Dec 04 18:06:11 crc kubenswrapper[4631]: I1204 18:06:11.289517 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v8kc5"] Dec 04 18:06:11 crc kubenswrapper[4631]: I1204 18:06:11.296950 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-v8kc5"] Dec 04 18:06:11 crc kubenswrapper[4631]: I1204 18:06:11.305984 4631 scope.go:117] "RemoveContainer" containerID="0c703758d186a4006ffa209285a98d25878f7d490d875dd0ac4382f40abc5d1f" Dec 04 18:06:11 crc kubenswrapper[4631]: I1204 18:06:11.348507 4631 scope.go:117] "RemoveContainer" containerID="5de268cd134865480140bcad4d15948d37c5edcb691727b50311b8cd3e7d8363" Dec 04 18:06:11 crc kubenswrapper[4631]: E1204 18:06:11.349018 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5de268cd134865480140bcad4d15948d37c5edcb691727b50311b8cd3e7d8363\": container with ID starting with 5de268cd134865480140bcad4d15948d37c5edcb691727b50311b8cd3e7d8363 not found: ID does not exist" containerID="5de268cd134865480140bcad4d15948d37c5edcb691727b50311b8cd3e7d8363" Dec 04 18:06:11 crc kubenswrapper[4631]: I1204 18:06:11.349078 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5de268cd134865480140bcad4d15948d37c5edcb691727b50311b8cd3e7d8363"} err="failed to get container status \"5de268cd134865480140bcad4d15948d37c5edcb691727b50311b8cd3e7d8363\": rpc error: code = NotFound desc = could not find container \"5de268cd134865480140bcad4d15948d37c5edcb691727b50311b8cd3e7d8363\": container with ID starting with 5de268cd134865480140bcad4d15948d37c5edcb691727b50311b8cd3e7d8363 not found: ID does not exist" Dec 04 18:06:11 crc kubenswrapper[4631]: I1204 18:06:11.349114 4631 scope.go:117] "RemoveContainer" containerID="0a686de84215d9004e404ae1204ac4599a4b782d52ab3e4d60a2c4151c21fbb6" Dec 04 18:06:11 crc kubenswrapper[4631]: E1204 18:06:11.349534 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a686de84215d9004e404ae1204ac4599a4b782d52ab3e4d60a2c4151c21fbb6\": container with ID starting with 0a686de84215d9004e404ae1204ac4599a4b782d52ab3e4d60a2c4151c21fbb6 not found: ID does not exist" containerID="0a686de84215d9004e404ae1204ac4599a4b782d52ab3e4d60a2c4151c21fbb6" Dec 04 18:06:11 crc kubenswrapper[4631]: I1204 18:06:11.349569 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a686de84215d9004e404ae1204ac4599a4b782d52ab3e4d60a2c4151c21fbb6"} err="failed to get container status \"0a686de84215d9004e404ae1204ac4599a4b782d52ab3e4d60a2c4151c21fbb6\": rpc error: code = NotFound desc = could not find container \"0a686de84215d9004e404ae1204ac4599a4b782d52ab3e4d60a2c4151c21fbb6\": container with ID starting with 0a686de84215d9004e404ae1204ac4599a4b782d52ab3e4d60a2c4151c21fbb6 not found: ID does not exist" Dec 04 18:06:11 crc kubenswrapper[4631]: I1204 18:06:11.349595 4631 scope.go:117] "RemoveContainer" 
containerID="0c703758d186a4006ffa209285a98d25878f7d490d875dd0ac4382f40abc5d1f" Dec 04 18:06:11 crc kubenswrapper[4631]: E1204 18:06:11.349829 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c703758d186a4006ffa209285a98d25878f7d490d875dd0ac4382f40abc5d1f\": container with ID starting with 0c703758d186a4006ffa209285a98d25878f7d490d875dd0ac4382f40abc5d1f not found: ID does not exist" containerID="0c703758d186a4006ffa209285a98d25878f7d490d875dd0ac4382f40abc5d1f" Dec 04 18:06:11 crc kubenswrapper[4631]: I1204 18:06:11.349848 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c703758d186a4006ffa209285a98d25878f7d490d875dd0ac4382f40abc5d1f"} err="failed to get container status \"0c703758d186a4006ffa209285a98d25878f7d490d875dd0ac4382f40abc5d1f\": rpc error: code = NotFound desc = could not find container \"0c703758d186a4006ffa209285a98d25878f7d490d875dd0ac4382f40abc5d1f\": container with ID starting with 0c703758d186a4006ffa209285a98d25878f7d490d875dd0ac4382f40abc5d1f not found: ID does not exist" Dec 04 18:06:12 crc kubenswrapper[4631]: I1204 18:06:12.250040 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f175691-4966-4d0a-8a3a-418859733db9" path="/var/lib/kubelet/pods/2f175691-4966-4d0a-8a3a-418859733db9/volumes" Dec 04 18:06:12 crc kubenswrapper[4631]: I1204 18:06:12.260929 4631 generic.go:334] "Generic (PLEG): container finished" podID="c2ec68e5-0f90-46f3-b0f7-1fdc8956c306" containerID="4d292fcc5b4a8cbcbbcf71d1a79dc5d3d83ee8dca23c0c08e00e40654b731426" exitCode=0 Dec 04 18:06:12 crc kubenswrapper[4631]: I1204 18:06:12.260974 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj" event={"ID":"c2ec68e5-0f90-46f3-b0f7-1fdc8956c306","Type":"ContainerDied","Data":"4d292fcc5b4a8cbcbbcf71d1a79dc5d3d83ee8dca23c0c08e00e40654b731426"} Dec 04 18:06:13 crc kubenswrapper[4631]: I1204 18:06:13.692094 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj" Dec 04 18:06:13 crc kubenswrapper[4631]: I1204 18:06:13.752734 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2ec68e5-0f90-46f3-b0f7-1fdc8956c306-ssh-key\") pod \"c2ec68e5-0f90-46f3-b0f7-1fdc8956c306\" (UID: \"c2ec68e5-0f90-46f3-b0f7-1fdc8956c306\") " Dec 04 18:06:13 crc kubenswrapper[4631]: I1204 18:06:13.752811 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xf9vm\" (UniqueName: \"kubernetes.io/projected/c2ec68e5-0f90-46f3-b0f7-1fdc8956c306-kube-api-access-xf9vm\") pod \"c2ec68e5-0f90-46f3-b0f7-1fdc8956c306\" (UID: \"c2ec68e5-0f90-46f3-b0f7-1fdc8956c306\") " Dec 04 18:06:13 crc kubenswrapper[4631]: I1204 18:06:13.752840 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2ec68e5-0f90-46f3-b0f7-1fdc8956c306-inventory\") pod \"c2ec68e5-0f90-46f3-b0f7-1fdc8956c306\" (UID: \"c2ec68e5-0f90-46f3-b0f7-1fdc8956c306\") " Dec 04 18:06:13 crc kubenswrapper[4631]: I1204 18:06:13.758664 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2ec68e5-0f90-46f3-b0f7-1fdc8956c306-kube-api-access-xf9vm" (OuterVolumeSpecName: "kube-api-access-xf9vm") pod "c2ec68e5-0f90-46f3-b0f7-1fdc8956c306" (UID: "c2ec68e5-0f90-46f3-b0f7-1fdc8956c306"). InnerVolumeSpecName "kube-api-access-xf9vm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:06:13 crc kubenswrapper[4631]: I1204 18:06:13.782512 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2ec68e5-0f90-46f3-b0f7-1fdc8956c306-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c2ec68e5-0f90-46f3-b0f7-1fdc8956c306" (UID: "c2ec68e5-0f90-46f3-b0f7-1fdc8956c306"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:06:13 crc kubenswrapper[4631]: I1204 18:06:13.787189 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2ec68e5-0f90-46f3-b0f7-1fdc8956c306-inventory" (OuterVolumeSpecName: "inventory") pod "c2ec68e5-0f90-46f3-b0f7-1fdc8956c306" (UID: "c2ec68e5-0f90-46f3-b0f7-1fdc8956c306"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:06:13 crc kubenswrapper[4631]: I1204 18:06:13.856291 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xf9vm\" (UniqueName: \"kubernetes.io/projected/c2ec68e5-0f90-46f3-b0f7-1fdc8956c306-kube-api-access-xf9vm\") on node \"crc\" DevicePath \"\"" Dec 04 18:06:13 crc kubenswrapper[4631]: I1204 18:06:13.856681 4631 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2ec68e5-0f90-46f3-b0f7-1fdc8956c306-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 18:06:13 crc kubenswrapper[4631]: I1204 18:06:13.856747 4631 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2ec68e5-0f90-46f3-b0f7-1fdc8956c306-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.287652 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj" event={"ID":"c2ec68e5-0f90-46f3-b0f7-1fdc8956c306","Type":"ContainerDied","Data":"b636a7b227b2ff02da38cb6be9e5ae4606ff938188ca95e26952b564025f885f"} Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.287689 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b636a7b227b2ff02da38cb6be9e5ae4606ff938188ca95e26952b564025f885f" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.287702 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.362064 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv"] Dec 04 18:06:14 crc kubenswrapper[4631]: E1204 18:06:14.362484 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f175691-4966-4d0a-8a3a-418859733db9" containerName="extract-utilities" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.362508 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f175691-4966-4d0a-8a3a-418859733db9" containerName="extract-utilities" Dec 04 18:06:14 crc kubenswrapper[4631]: E1204 18:06:14.362532 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2ec68e5-0f90-46f3-b0f7-1fdc8956c306" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.362544 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2ec68e5-0f90-46f3-b0f7-1fdc8956c306" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 04 18:06:14 crc kubenswrapper[4631]: E1204 18:06:14.362560 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f175691-4966-4d0a-8a3a-418859733db9" containerName="registry-server" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.362566 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f175691-4966-4d0a-8a3a-418859733db9" containerName="registry-server" Dec 04 18:06:14 crc kubenswrapper[4631]: E1204 18:06:14.362576 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f175691-4966-4d0a-8a3a-418859733db9" containerName="extract-content" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.362582 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f175691-4966-4d0a-8a3a-418859733db9" containerName="extract-content" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.362778 4631 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="2f175691-4966-4d0a-8a3a-418859733db9" containerName="registry-server" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.362803 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2ec68e5-0f90-46f3-b0f7-1fdc8956c306" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.363573 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.367926 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.368138 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.368225 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.368492 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.370629 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.372074 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-9284p" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.371960 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.372216 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.383951 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv"] Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.466595 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.466926 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66fvf\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-kube-api-access-66fvf\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.466988 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-repo-setup-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.467019 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.467052 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.467095 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.467206 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.467242 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.467270 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.467314 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: 
\"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.467466 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.467590 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.467700 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.467778 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.570244 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.570390 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.570448 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.570486 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.570504 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66fvf\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-kube-api-access-66fvf\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.570529 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.570550 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.570580 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.570627 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.570700 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.570736 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: 
\"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.570759 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.570799 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.570842 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.576077 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.577085 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.577148 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.578306 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.579021 4631 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.579090 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.581382 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.582208 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.582764 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.585848 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.586828 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.587064 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.589270 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.595476 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66fvf\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-kube-api-access-66fvf\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:14 crc kubenswrapper[4631]: I1204 18:06:14.681523 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:06:15 crc kubenswrapper[4631]: I1204 18:06:15.203866 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv"] Dec 04 18:06:15 crc kubenswrapper[4631]: I1204 18:06:15.302576 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" event={"ID":"52d13c44-1eee-4a4b-bd73-982e9d57f0d8","Type":"ContainerStarted","Data":"e165f5e95d0f5c7a040ec5714bec7205e323a668c0dd7b21376d2bbed60687ed"} Dec 04 18:06:16 crc kubenswrapper[4631]: I1204 18:06:16.313998 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" event={"ID":"52d13c44-1eee-4a4b-bd73-982e9d57f0d8","Type":"ContainerStarted","Data":"8602c5e4807a1d1c98ab496d95c08a67248903a674e973fdfea03feb3c87cd8e"} Dec 04 18:06:16 crc kubenswrapper[4631]: I1204 18:06:16.321104 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-f885f"] Dec 04 18:06:16 crc kubenswrapper[4631]: I1204 18:06:16.323676 4631 util.go:30] "No sandbox for pod can be found. 
Dec 04 18:06:16 crc kubenswrapper[4631]: I1204 18:06:16.344099 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f885f"]
Dec 04 18:06:16 crc kubenswrapper[4631]: I1204 18:06:16.353880 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" podStartSLOduration=2.178431967 podStartE2EDuration="2.353859238s" podCreationTimestamp="2025-12-04 18:06:14 +0000 UTC" firstStartedPulling="2025-12-04 18:06:15.227926929 +0000 UTC m=+2305.260168927" lastFinishedPulling="2025-12-04 18:06:15.4033542 +0000 UTC m=+2305.435596198" observedRunningTime="2025-12-04 18:06:16.345854114 +0000 UTC m=+2306.378096122" watchObservedRunningTime="2025-12-04 18:06:16.353859238 +0000 UTC m=+2306.386101236"
Dec 04 18:06:16 crc kubenswrapper[4631]: I1204 18:06:16.423518 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cb317f4-d27f-494d-8ffd-ecef87a94321-utilities\") pod \"community-operators-f885f\" (UID: \"2cb317f4-d27f-494d-8ffd-ecef87a94321\") " pod="openshift-marketplace/community-operators-f885f"
Dec 04 18:06:16 crc kubenswrapper[4631]: I1204 18:06:16.423633 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5thf\" (UniqueName: \"kubernetes.io/projected/2cb317f4-d27f-494d-8ffd-ecef87a94321-kube-api-access-c5thf\") pod \"community-operators-f885f\" (UID: \"2cb317f4-d27f-494d-8ffd-ecef87a94321\") " pod="openshift-marketplace/community-operators-f885f"
Dec 04 18:06:16 crc kubenswrapper[4631]: I1204 18:06:16.423811 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cb317f4-d27f-494d-8ffd-ecef87a94321-catalog-content\") pod \"community-operators-f885f\" (UID: \"2cb317f4-d27f-494d-8ffd-ecef87a94321\") " pod="openshift-marketplace/community-operators-f885f"
Dec 04 18:06:16 crc kubenswrapper[4631]: I1204 18:06:16.525156 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5thf\" (UniqueName: \"kubernetes.io/projected/2cb317f4-d27f-494d-8ffd-ecef87a94321-kube-api-access-c5thf\") pod \"community-operators-f885f\" (UID: \"2cb317f4-d27f-494d-8ffd-ecef87a94321\") " pod="openshift-marketplace/community-operators-f885f"
Dec 04 18:06:16 crc kubenswrapper[4631]: I1204 18:06:16.525322 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cb317f4-d27f-494d-8ffd-ecef87a94321-catalog-content\") pod \"community-operators-f885f\" (UID: \"2cb317f4-d27f-494d-8ffd-ecef87a94321\") " pod="openshift-marketplace/community-operators-f885f"
Dec 04 18:06:16 crc kubenswrapper[4631]: I1204 18:06:16.526008 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cb317f4-d27f-494d-8ffd-ecef87a94321-catalog-content\") pod \"community-operators-f885f\" (UID: \"2cb317f4-d27f-494d-8ffd-ecef87a94321\") " pod="openshift-marketplace/community-operators-f885f"
Dec 04 18:06:16 crc kubenswrapper[4631]: I1204 18:06:16.526163 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cb317f4-d27f-494d-8ffd-ecef87a94321-utilities\") pod \"community-operators-f885f\" (UID: \"2cb317f4-d27f-494d-8ffd-ecef87a94321\") " pod="openshift-marketplace/community-operators-f885f"
Dec 04 18:06:16 crc kubenswrapper[4631]: I1204 18:06:16.526465 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cb317f4-d27f-494d-8ffd-ecef87a94321-utilities\") pod \"community-operators-f885f\" (UID: \"2cb317f4-d27f-494d-8ffd-ecef87a94321\") " pod="openshift-marketplace/community-operators-f885f"
Dec 04 18:06:16 crc kubenswrapper[4631]: I1204 18:06:16.546585 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5thf\" (UniqueName: \"kubernetes.io/projected/2cb317f4-d27f-494d-8ffd-ecef87a94321-kube-api-access-c5thf\") pod \"community-operators-f885f\" (UID: \"2cb317f4-d27f-494d-8ffd-ecef87a94321\") " pod="openshift-marketplace/community-operators-f885f"
Dec 04 18:06:16 crc kubenswrapper[4631]: I1204 18:06:16.662978 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f885f"
Dec 04 18:06:17 crc kubenswrapper[4631]: I1204 18:06:17.209163 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f885f"]
Dec 04 18:06:17 crc kubenswrapper[4631]: W1204 18:06:17.211122 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2cb317f4_d27f_494d_8ffd_ecef87a94321.slice/crio-47f29f33a188d4b0419d0edee1a11492b0707e5d12d032f5e8175d3c70817bd1 WatchSource:0}: Error finding container 47f29f33a188d4b0419d0edee1a11492b0707e5d12d032f5e8175d3c70817bd1: Status 404 returned error can't find the container with id 47f29f33a188d4b0419d0edee1a11492b0707e5d12d032f5e8175d3c70817bd1
Dec 04 18:06:17 crc kubenswrapper[4631]: I1204 18:06:17.324077 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f885f" event={"ID":"2cb317f4-d27f-494d-8ffd-ecef87a94321","Type":"ContainerStarted","Data":"47f29f33a188d4b0419d0edee1a11492b0707e5d12d032f5e8175d3c70817bd1"}
Dec 04 18:06:18 crc kubenswrapper[4631]: I1204 18:06:18.333738 4631 generic.go:334] "Generic (PLEG): container finished" podID="2cb317f4-d27f-494d-8ffd-ecef87a94321" containerID="6612fb1eacfe4c7229c7548e4789a5f4793059373403964b018061bbcfff6ad6" exitCode=0
Dec 04 18:06:18 crc kubenswrapper[4631]: I1204 18:06:18.333839 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f885f" event={"ID":"2cb317f4-d27f-494d-8ffd-ecef87a94321","Type":"ContainerDied","Data":"6612fb1eacfe4c7229c7548e4789a5f4793059373403964b018061bbcfff6ad6"}
Dec 04 18:06:19 crc kubenswrapper[4631]: I1204 18:06:19.345840 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f885f" event={"ID":"2cb317f4-d27f-494d-8ffd-ecef87a94321","Type":"ContainerStarted","Data":"e07849e7ae58e49b8df9e9004d096a13e91076f78e1f4834cc0e2773a547a112"}
Dec 04 18:06:20 crc kubenswrapper[4631]: I1204 18:06:20.258110 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6"
Dec 04 18:06:20 crc kubenswrapper[4631]: E1204 18:06:20.259248 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 18:06:20 crc kubenswrapper[4631]: I1204 18:06:20.355179 4631 generic.go:334] "Generic (PLEG): container finished" podID="2cb317f4-d27f-494d-8ffd-ecef87a94321" containerID="e07849e7ae58e49b8df9e9004d096a13e91076f78e1f4834cc0e2773a547a112" exitCode=0
Dec 04 18:06:20 crc kubenswrapper[4631]: I1204 18:06:20.355216 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f885f" event={"ID":"2cb317f4-d27f-494d-8ffd-ecef87a94321","Type":"ContainerDied","Data":"e07849e7ae58e49b8df9e9004d096a13e91076f78e1f4834cc0e2773a547a112"}
Dec 04 18:06:22 crc kubenswrapper[4631]: I1204 18:06:22.419783 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f885f" event={"ID":"2cb317f4-d27f-494d-8ffd-ecef87a94321","Type":"ContainerStarted","Data":"74c51a851f70928c9c7f442295301555fdaa4b20c0784e4d662458436856dd46"}
Dec 04 18:06:26 crc kubenswrapper[4631]: I1204 18:06:26.663208 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-f885f"
Dec 04 18:06:26 crc kubenswrapper[4631]: I1204 18:06:26.663841 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-f885f"
Dec 04 18:06:26 crc kubenswrapper[4631]: I1204 18:06:26.710584 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-f885f"
Dec 04 18:06:26 crc kubenswrapper[4631]: I1204 18:06:26.730192 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-f885f" podStartSLOduration=8.301151198 podStartE2EDuration="10.730176379s" podCreationTimestamp="2025-12-04 18:06:16 +0000 UTC" firstStartedPulling="2025-12-04 18:06:18.335588803 +0000 UTC m=+2308.367830811" lastFinishedPulling="2025-12-04 18:06:20.764613994 +0000 UTC m=+2310.796855992" observedRunningTime="2025-12-04 18:06:22.444781444 +0000 UTC m=+2312.477023452" watchObservedRunningTime="2025-12-04 18:06:26.730176379 +0000 UTC m=+2316.762418377"
Dec 04 18:06:27 crc kubenswrapper[4631]: I1204 18:06:27.513667 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-f885f"
Dec 04 18:06:28 crc kubenswrapper[4631]: I1204 18:06:28.896133 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f885f"]
Dec 04 18:06:29 crc kubenswrapper[4631]: I1204 18:06:29.480733 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-f885f" podUID="2cb317f4-d27f-494d-8ffd-ecef87a94321" containerName="registry-server" containerID="cri-o://74c51a851f70928c9c7f442295301555fdaa4b20c0784e4d662458436856dd46" gracePeriod=2
Dec 04 18:06:29 crc kubenswrapper[4631]: I1204 18:06:29.909485 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f885f"
Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.084537 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cb317f4-d27f-494d-8ffd-ecef87a94321-catalog-content\") pod \"2cb317f4-d27f-494d-8ffd-ecef87a94321\" (UID: \"2cb317f4-d27f-494d-8ffd-ecef87a94321\") "
Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.084687 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5thf\" (UniqueName: \"kubernetes.io/projected/2cb317f4-d27f-494d-8ffd-ecef87a94321-kube-api-access-c5thf\") pod \"2cb317f4-d27f-494d-8ffd-ecef87a94321\" (UID: \"2cb317f4-d27f-494d-8ffd-ecef87a94321\") "
Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.084715 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cb317f4-d27f-494d-8ffd-ecef87a94321-utilities\") pod \"2cb317f4-d27f-494d-8ffd-ecef87a94321\" (UID: \"2cb317f4-d27f-494d-8ffd-ecef87a94321\") "
Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.085969 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2cb317f4-d27f-494d-8ffd-ecef87a94321-utilities" (OuterVolumeSpecName: "utilities") pod "2cb317f4-d27f-494d-8ffd-ecef87a94321" (UID: "2cb317f4-d27f-494d-8ffd-ecef87a94321"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.093558 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cb317f4-d27f-494d-8ffd-ecef87a94321-kube-api-access-c5thf" (OuterVolumeSpecName: "kube-api-access-c5thf") pod "2cb317f4-d27f-494d-8ffd-ecef87a94321" (UID: "2cb317f4-d27f-494d-8ffd-ecef87a94321"). InnerVolumeSpecName "kube-api-access-c5thf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.187183 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5thf\" (UniqueName: \"kubernetes.io/projected/2cb317f4-d27f-494d-8ffd-ecef87a94321-kube-api-access-c5thf\") on node \"crc\" DevicePath \"\""
Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.187214 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cb317f4-d27f-494d-8ffd-ecef87a94321-utilities\") on node \"crc\" DevicePath \"\""
Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.223088 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2cb317f4-d27f-494d-8ffd-ecef87a94321-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2cb317f4-d27f-494d-8ffd-ecef87a94321" (UID: "2cb317f4-d27f-494d-8ffd-ecef87a94321"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
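[editor's note] The "SyncLoop (probe)" entries above show the startup probe reporting unhealthy, then started, and only afterwards the readiness probe flipping to ready: readiness results are not acted on until the startup probe has passed. A minimal replay of that gating for the events logged here (a sketch of the behavior, not kubelet's implementation):

```python
# Replay the four probe transitions logged above for community-operators-f885f.
events = [  # (probe, status) in log order
    ("startup", "unhealthy"),
    ("readiness", ""),        # reported, but gated: startup has not passed yet
    ("startup", "started"),
    ("readiness", "ready"),
]

started = False
for probe, status in events:
    if probe == "startup":
        started = status == "started"
        print(f"startup -> {status!r} (container started: {started})")
    else:
        effective = status if started else "gated until startup passes"
        print(f"readiness -> {status!r} ({effective})")
```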
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.289437 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cb317f4-d27f-494d-8ffd-ecef87a94321-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.498786 4631 generic.go:334] "Generic (PLEG): container finished" podID="2cb317f4-d27f-494d-8ffd-ecef87a94321" containerID="74c51a851f70928c9c7f442295301555fdaa4b20c0784e4d662458436856dd46" exitCode=0 Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.498832 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f885f" event={"ID":"2cb317f4-d27f-494d-8ffd-ecef87a94321","Type":"ContainerDied","Data":"74c51a851f70928c9c7f442295301555fdaa4b20c0784e4d662458436856dd46"} Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.498933 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f885f" Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.498958 4631 scope.go:117] "RemoveContainer" containerID="74c51a851f70928c9c7f442295301555fdaa4b20c0784e4d662458436856dd46" Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.498944 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f885f" event={"ID":"2cb317f4-d27f-494d-8ffd-ecef87a94321","Type":"ContainerDied","Data":"47f29f33a188d4b0419d0edee1a11492b0707e5d12d032f5e8175d3c70817bd1"} Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.526957 4631 scope.go:117] "RemoveContainer" containerID="e07849e7ae58e49b8df9e9004d096a13e91076f78e1f4834cc0e2773a547a112" Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.537865 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f885f"] Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.547922 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-f885f"] Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.558489 4631 scope.go:117] "RemoveContainer" containerID="6612fb1eacfe4c7229c7548e4789a5f4793059373403964b018061bbcfff6ad6" Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.588771 4631 scope.go:117] "RemoveContainer" containerID="74c51a851f70928c9c7f442295301555fdaa4b20c0784e4d662458436856dd46" Dec 04 18:06:30 crc kubenswrapper[4631]: E1204 18:06:30.589644 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74c51a851f70928c9c7f442295301555fdaa4b20c0784e4d662458436856dd46\": container with ID starting with 74c51a851f70928c9c7f442295301555fdaa4b20c0784e4d662458436856dd46 not found: ID does not exist" containerID="74c51a851f70928c9c7f442295301555fdaa4b20c0784e4d662458436856dd46" Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.589701 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74c51a851f70928c9c7f442295301555fdaa4b20c0784e4d662458436856dd46"} err="failed to get container status \"74c51a851f70928c9c7f442295301555fdaa4b20c0784e4d662458436856dd46\": rpc error: code = NotFound desc = could not find container \"74c51a851f70928c9c7f442295301555fdaa4b20c0784e4d662458436856dd46\": container with ID starting with 74c51a851f70928c9c7f442295301555fdaa4b20c0784e4d662458436856dd46 not found: ID does not exist" Dec 04 
18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.589733 4631 scope.go:117] "RemoveContainer" containerID="e07849e7ae58e49b8df9e9004d096a13e91076f78e1f4834cc0e2773a547a112" Dec 04 18:06:30 crc kubenswrapper[4631]: E1204 18:06:30.590259 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e07849e7ae58e49b8df9e9004d096a13e91076f78e1f4834cc0e2773a547a112\": container with ID starting with e07849e7ae58e49b8df9e9004d096a13e91076f78e1f4834cc0e2773a547a112 not found: ID does not exist" containerID="e07849e7ae58e49b8df9e9004d096a13e91076f78e1f4834cc0e2773a547a112" Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.590302 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e07849e7ae58e49b8df9e9004d096a13e91076f78e1f4834cc0e2773a547a112"} err="failed to get container status \"e07849e7ae58e49b8df9e9004d096a13e91076f78e1f4834cc0e2773a547a112\": rpc error: code = NotFound desc = could not find container \"e07849e7ae58e49b8df9e9004d096a13e91076f78e1f4834cc0e2773a547a112\": container with ID starting with e07849e7ae58e49b8df9e9004d096a13e91076f78e1f4834cc0e2773a547a112 not found: ID does not exist" Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.590328 4631 scope.go:117] "RemoveContainer" containerID="6612fb1eacfe4c7229c7548e4789a5f4793059373403964b018061bbcfff6ad6" Dec 04 18:06:30 crc kubenswrapper[4631]: E1204 18:06:30.590692 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6612fb1eacfe4c7229c7548e4789a5f4793059373403964b018061bbcfff6ad6\": container with ID starting with 6612fb1eacfe4c7229c7548e4789a5f4793059373403964b018061bbcfff6ad6 not found: ID does not exist" containerID="6612fb1eacfe4c7229c7548e4789a5f4793059373403964b018061bbcfff6ad6" Dec 04 18:06:30 crc kubenswrapper[4631]: I1204 18:06:30.590735 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6612fb1eacfe4c7229c7548e4789a5f4793059373403964b018061bbcfff6ad6"} err="failed to get container status \"6612fb1eacfe4c7229c7548e4789a5f4793059373403964b018061bbcfff6ad6\": rpc error: code = NotFound desc = could not find container \"6612fb1eacfe4c7229c7548e4789a5f4793059373403964b018061bbcfff6ad6\": container with ID starting with 6612fb1eacfe4c7229c7548e4789a5f4793059373403964b018061bbcfff6ad6 not found: ID does not exist" Dec 04 18:06:31 crc kubenswrapper[4631]: I1204 18:06:31.239506 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:06:31 crc kubenswrapper[4631]: E1204 18:06:31.239747 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:06:32 crc kubenswrapper[4631]: I1204 18:06:32.258240 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2cb317f4-d27f-494d-8ffd-ecef87a94321" path="/var/lib/kubelet/pods/2cb317f4-d27f-494d-8ffd-ecef87a94321/volumes" Dec 04 18:06:45 crc kubenswrapper[4631]: I1204 18:06:45.239704 4631 scope.go:117] "RemoveContainer" 
containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:06:45 crc kubenswrapper[4631]: E1204 18:06:45.241880 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:06:58 crc kubenswrapper[4631]: I1204 18:06:58.771574 4631 generic.go:334] "Generic (PLEG): container finished" podID="52d13c44-1eee-4a4b-bd73-982e9d57f0d8" containerID="8602c5e4807a1d1c98ab496d95c08a67248903a674e973fdfea03feb3c87cd8e" exitCode=0 Dec 04 18:06:58 crc kubenswrapper[4631]: I1204 18:06:58.771651 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" event={"ID":"52d13c44-1eee-4a4b-bd73-982e9d57f0d8","Type":"ContainerDied","Data":"8602c5e4807a1d1c98ab496d95c08a67248903a674e973fdfea03feb3c87cd8e"} Dec 04 18:06:59 crc kubenswrapper[4631]: I1204 18:06:59.239980 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:06:59 crc kubenswrapper[4631]: E1204 18:06:59.240311 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.181671 4631 util.go:48] "No ready sandbox for pod can be found. 
Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.263466 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-inventory\") pod \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") "
Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.263537 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-repo-setup-combined-ca-bundle\") pod \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") "
Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.263623 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") "
Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.264527 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-nova-combined-ca-bundle\") pod \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") "
Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.264561 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-neutron-metadata-combined-ca-bundle\") pod \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") "
Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.264614 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") "
Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.264641 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-bootstrap-combined-ca-bundle\") pod \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") "
Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.264694 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-ssh-key\") pod \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") "
Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.264839 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-ovn-combined-ca-bundle\") pod \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") "
Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.264862 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-libvirt-combined-ca-bundle\") pod \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") "
Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.264909 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66fvf\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-kube-api-access-66fvf\") pod \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") "
Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.264938 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-telemetry-combined-ca-bundle\") pod \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") "
Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.264963 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-ovn-default-certs-0\") pod \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") "
Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.265035 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\" (UID: \"52d13c44-1eee-4a4b-bd73-982e9d57f0d8\") "
Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.270475 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "52d13c44-1eee-4a4b-bd73-982e9d57f0d8" (UID: "52d13c44-1eee-4a4b-bd73-982e9d57f0d8"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.270576 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "52d13c44-1eee-4a4b-bd73-982e9d57f0d8" (UID: "52d13c44-1eee-4a4b-bd73-982e9d57f0d8"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.270892 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "52d13c44-1eee-4a4b-bd73-982e9d57f0d8" (UID: "52d13c44-1eee-4a4b-bd73-982e9d57f0d8"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.271933 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "52d13c44-1eee-4a4b-bd73-982e9d57f0d8" (UID: "52d13c44-1eee-4a4b-bd73-982e9d57f0d8"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.272242 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-kube-api-access-66fvf" (OuterVolumeSpecName: "kube-api-access-66fvf") pod "52d13c44-1eee-4a4b-bd73-982e9d57f0d8" (UID: "52d13c44-1eee-4a4b-bd73-982e9d57f0d8"). InnerVolumeSpecName "kube-api-access-66fvf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.272518 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "52d13c44-1eee-4a4b-bd73-982e9d57f0d8" (UID: "52d13c44-1eee-4a4b-bd73-982e9d57f0d8"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.273342 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "52d13c44-1eee-4a4b-bd73-982e9d57f0d8" (UID: "52d13c44-1eee-4a4b-bd73-982e9d57f0d8"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.274057 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "52d13c44-1eee-4a4b-bd73-982e9d57f0d8" (UID: "52d13c44-1eee-4a4b-bd73-982e9d57f0d8"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.275116 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "52d13c44-1eee-4a4b-bd73-982e9d57f0d8" (UID: "52d13c44-1eee-4a4b-bd73-982e9d57f0d8"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.276497 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "52d13c44-1eee-4a4b-bd73-982e9d57f0d8" (UID: "52d13c44-1eee-4a4b-bd73-982e9d57f0d8"). InnerVolumeSpecName "libvirt-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.276798 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "52d13c44-1eee-4a4b-bd73-982e9d57f0d8" (UID: "52d13c44-1eee-4a4b-bd73-982e9d57f0d8"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.278498 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "52d13c44-1eee-4a4b-bd73-982e9d57f0d8" (UID: "52d13c44-1eee-4a4b-bd73-982e9d57f0d8"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.296927 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "52d13c44-1eee-4a4b-bd73-982e9d57f0d8" (UID: "52d13c44-1eee-4a4b-bd73-982e9d57f0d8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.305329 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-inventory" (OuterVolumeSpecName: "inventory") pod "52d13c44-1eee-4a4b-bd73-982e9d57f0d8" (UID: "52d13c44-1eee-4a4b-bd73-982e9d57f0d8"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.368785 4631 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.368986 4631 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.369075 4631 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.369134 4631 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.369195 4631 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.369246 4631 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.369302 4631 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.369356 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66fvf\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-kube-api-access-66fvf\") on node \"crc\" DevicePath \"\"" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.369461 4631 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.369528 4631 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.369589 4631 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.369644 4631 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-inventory\") on node \"crc\" DevicePath 
\"\"" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.369700 4631 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.369753 4631 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/52d13c44-1eee-4a4b-bd73-982e9d57f0d8-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.791509 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" event={"ID":"52d13c44-1eee-4a4b-bd73-982e9d57f0d8","Type":"ContainerDied","Data":"e165f5e95d0f5c7a040ec5714bec7205e323a668c0dd7b21376d2bbed60687ed"} Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.791568 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e165f5e95d0f5c7a040ec5714bec7205e323a668c0dd7b21376d2bbed60687ed" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.791583 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.954424 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8"] Dec 04 18:07:00 crc kubenswrapper[4631]: E1204 18:07:00.954815 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52d13c44-1eee-4a4b-bd73-982e9d57f0d8" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.954830 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="52d13c44-1eee-4a4b-bd73-982e9d57f0d8" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 04 18:07:00 crc kubenswrapper[4631]: E1204 18:07:00.954840 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cb317f4-d27f-494d-8ffd-ecef87a94321" containerName="registry-server" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.954847 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cb317f4-d27f-494d-8ffd-ecef87a94321" containerName="registry-server" Dec 04 18:07:00 crc kubenswrapper[4631]: E1204 18:07:00.954879 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cb317f4-d27f-494d-8ffd-ecef87a94321" containerName="extract-utilities" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.954887 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cb317f4-d27f-494d-8ffd-ecef87a94321" containerName="extract-utilities" Dec 04 18:07:00 crc kubenswrapper[4631]: E1204 18:07:00.954904 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cb317f4-d27f-494d-8ffd-ecef87a94321" containerName="extract-content" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.954910 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cb317f4-d27f-494d-8ffd-ecef87a94321" containerName="extract-content" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.955076 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cb317f4-d27f-494d-8ffd-ecef87a94321" containerName="registry-server" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.955092 4631 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="52d13c44-1eee-4a4b-bd73-982e9d57f0d8" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.955815 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.957426 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.957953 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.957965 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.958021 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-9284p" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.969695 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.978591 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qspr8\" (UID: \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.978705 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qspr8\" (UID: \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.978727 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qspr8\" (UID: \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.978784 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4f2f\" (UniqueName: \"kubernetes.io/projected/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-kube-api-access-n4f2f\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qspr8\" (UID: \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.978804 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qspr8\" (UID: \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" Dec 04 18:07:00 crc kubenswrapper[4631]: I1204 18:07:00.979818 4631 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8"] Dec 04 18:07:01 crc kubenswrapper[4631]: I1204 18:07:01.080763 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qspr8\" (UID: \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" Dec 04 18:07:01 crc kubenswrapper[4631]: I1204 18:07:01.080850 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qspr8\" (UID: \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" Dec 04 18:07:01 crc kubenswrapper[4631]: I1204 18:07:01.080873 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qspr8\" (UID: \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" Dec 04 18:07:01 crc kubenswrapper[4631]: I1204 18:07:01.080945 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qspr8\" (UID: \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" Dec 04 18:07:01 crc kubenswrapper[4631]: I1204 18:07:01.080965 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4f2f\" (UniqueName: \"kubernetes.io/projected/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-kube-api-access-n4f2f\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qspr8\" (UID: \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" Dec 04 18:07:01 crc kubenswrapper[4631]: I1204 18:07:01.082238 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qspr8\" (UID: \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" Dec 04 18:07:01 crc kubenswrapper[4631]: I1204 18:07:01.086158 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qspr8\" (UID: \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" Dec 04 18:07:01 crc kubenswrapper[4631]: I1204 18:07:01.091425 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qspr8\" (UID: \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" Dec 04 18:07:01 crc kubenswrapper[4631]: I1204 18:07:01.092405 4631 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qspr8\" (UID: \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" Dec 04 18:07:01 crc kubenswrapper[4631]: I1204 18:07:01.096730 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4f2f\" (UniqueName: \"kubernetes.io/projected/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-kube-api-access-n4f2f\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qspr8\" (UID: \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" Dec 04 18:07:01 crc kubenswrapper[4631]: I1204 18:07:01.275310 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" Dec 04 18:07:01 crc kubenswrapper[4631]: I1204 18:07:01.799124 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8"] Dec 04 18:07:01 crc kubenswrapper[4631]: I1204 18:07:01.803486 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" event={"ID":"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1","Type":"ContainerStarted","Data":"27ccec0ad3fa25ae74fe39395980ea3933b2210be26bf9fda9255771c8ee40f6"} Dec 04 18:07:02 crc kubenswrapper[4631]: I1204 18:07:02.811478 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" event={"ID":"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1","Type":"ContainerStarted","Data":"0774521fc58b9b35813cef4d473ea287023e4382355676fde0d54272a4bc76a4"} Dec 04 18:07:02 crc kubenswrapper[4631]: I1204 18:07:02.831295 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" podStartSLOduration=2.656331598 podStartE2EDuration="2.83127854s" podCreationTimestamp="2025-12-04 18:07:00 +0000 UTC" firstStartedPulling="2025-12-04 18:07:01.795040859 +0000 UTC m=+2351.827282857" lastFinishedPulling="2025-12-04 18:07:01.969987801 +0000 UTC m=+2352.002229799" observedRunningTime="2025-12-04 18:07:02.824918484 +0000 UTC m=+2352.857160472" watchObservedRunningTime="2025-12-04 18:07:02.83127854 +0000 UTC m=+2352.863520538" Dec 04 18:07:13 crc kubenswrapper[4631]: I1204 18:07:13.239190 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:07:13 crc kubenswrapper[4631]: E1204 18:07:13.239892 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:07:27 crc kubenswrapper[4631]: I1204 18:07:27.239835 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:07:27 crc kubenswrapper[4631]: E1204 18:07:27.240487 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:07:40 crc kubenswrapper[4631]: I1204 18:07:40.246252 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:07:40 crc kubenswrapper[4631]: E1204 18:07:40.248776 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:07:54 crc kubenswrapper[4631]: I1204 18:07:54.240238 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:07:54 crc kubenswrapper[4631]: E1204 18:07:54.240974 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:08:05 crc kubenswrapper[4631]: I1204 18:08:05.239485 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:08:05 crc kubenswrapper[4631]: E1204 18:08:05.240321 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:08:12 crc kubenswrapper[4631]: I1204 18:08:12.402107 4631 generic.go:334] "Generic (PLEG): container finished" podID="e9c5fea5-b0f9-4894-bf45-699c8b23d9f1" containerID="0774521fc58b9b35813cef4d473ea287023e4382355676fde0d54272a4bc76a4" exitCode=0 Dec 04 18:08:12 crc kubenswrapper[4631]: I1204 18:08:12.402187 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" event={"ID":"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1","Type":"ContainerDied","Data":"0774521fc58b9b35813cef4d473ea287023e4382355676fde0d54272a4bc76a4"} Dec 04 18:08:13 crc kubenswrapper[4631]: I1204 18:08:13.827582 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" Dec 04 18:08:13 crc kubenswrapper[4631]: I1204 18:08:13.939150 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n4f2f\" (UniqueName: \"kubernetes.io/projected/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-kube-api-access-n4f2f\") pod \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\" (UID: \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\") " Dec 04 18:08:13 crc kubenswrapper[4631]: I1204 18:08:13.939418 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-ovn-combined-ca-bundle\") pod \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\" (UID: \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\") " Dec 04 18:08:13 crc kubenswrapper[4631]: I1204 18:08:13.939443 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-ovncontroller-config-0\") pod \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\" (UID: \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\") " Dec 04 18:08:13 crc kubenswrapper[4631]: I1204 18:08:13.939895 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-ssh-key\") pod \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\" (UID: \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\") " Dec 04 18:08:13 crc kubenswrapper[4631]: I1204 18:08:13.939998 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-inventory\") pod \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\" (UID: \"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1\") " Dec 04 18:08:13 crc kubenswrapper[4631]: I1204 18:08:13.944735 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-kube-api-access-n4f2f" (OuterVolumeSpecName: "kube-api-access-n4f2f") pod "e9c5fea5-b0f9-4894-bf45-699c8b23d9f1" (UID: "e9c5fea5-b0f9-4894-bf45-699c8b23d9f1"). InnerVolumeSpecName "kube-api-access-n4f2f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:08:13 crc kubenswrapper[4631]: I1204 18:08:13.945123 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "e9c5fea5-b0f9-4894-bf45-699c8b23d9f1" (UID: "e9c5fea5-b0f9-4894-bf45-699c8b23d9f1"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:08:13 crc kubenswrapper[4631]: I1204 18:08:13.964233 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "e9c5fea5-b0f9-4894-bf45-699c8b23d9f1" (UID: "e9c5fea5-b0f9-4894-bf45-699c8b23d9f1"). InnerVolumeSpecName "ovncontroller-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 18:08:13 crc kubenswrapper[4631]: I1204 18:08:13.967551 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e9c5fea5-b0f9-4894-bf45-699c8b23d9f1" (UID: "e9c5fea5-b0f9-4894-bf45-699c8b23d9f1"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:08:13 crc kubenswrapper[4631]: I1204 18:08:13.968024 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-inventory" (OuterVolumeSpecName: "inventory") pod "e9c5fea5-b0f9-4894-bf45-699c8b23d9f1" (UID: "e9c5fea5-b0f9-4894-bf45-699c8b23d9f1"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.042841 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n4f2f\" (UniqueName: \"kubernetes.io/projected/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-kube-api-access-n4f2f\") on node \"crc\" DevicePath \"\"" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.042882 4631 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.042895 4631 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.042908 4631 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.042919 4631 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9c5fea5-b0f9-4894-bf45-699c8b23d9f1-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.420431 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" event={"ID":"e9c5fea5-b0f9-4894-bf45-699c8b23d9f1","Type":"ContainerDied","Data":"27ccec0ad3fa25ae74fe39395980ea3933b2210be26bf9fda9255771c8ee40f6"} Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.420475 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27ccec0ad3fa25ae74fe39395980ea3933b2210be26bf9fda9255771c8ee40f6" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.420518 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qspr8" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.513962 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg"] Dec 04 18:08:14 crc kubenswrapper[4631]: E1204 18:08:14.514487 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9c5fea5-b0f9-4894-bf45-699c8b23d9f1" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.514507 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9c5fea5-b0f9-4894-bf45-699c8b23d9f1" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.514820 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9c5fea5-b0f9-4894-bf45-699c8b23d9f1" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.515600 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.518223 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-9284p" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.520643 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.520800 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.520661 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.521021 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.521665 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.531274 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg"] Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.655086 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.655180 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fnn2\" (UniqueName: \"kubernetes.io/projected/5bba5c47-0692-477b-9483-f80218571763-kube-api-access-8fnn2\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.655231 4631 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.655269 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.655326 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.655371 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.757419 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fnn2\" (UniqueName: \"kubernetes.io/projected/5bba5c47-0692-477b-9483-f80218571763-kube-api-access-8fnn2\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.757535 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.757585 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.757613 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-nova-metadata-neutron-config-0\") pod 
\"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.757671 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.757734 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.763283 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.763322 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.763291 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.764981 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.780126 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fnn2\" (UniqueName: \"kubernetes.io/projected/5bba5c47-0692-477b-9483-f80218571763-kube-api-access-8fnn2\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.780684 4631 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" Dec 04 18:08:14 crc kubenswrapper[4631]: I1204 18:08:14.831510 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" Dec 04 18:08:15 crc kubenswrapper[4631]: I1204 18:08:15.337106 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg"] Dec 04 18:08:15 crc kubenswrapper[4631]: W1204 18:08:15.344873 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5bba5c47_0692_477b_9483_f80218571763.slice/crio-00c63f3f5eaad125920a538a55daf9a0f5bf3b9e44de9185ff64889911dae128 WatchSource:0}: Error finding container 00c63f3f5eaad125920a538a55daf9a0f5bf3b9e44de9185ff64889911dae128: Status 404 returned error can't find the container with id 00c63f3f5eaad125920a538a55daf9a0f5bf3b9e44de9185ff64889911dae128 Dec 04 18:08:15 crc kubenswrapper[4631]: I1204 18:08:15.429682 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" event={"ID":"5bba5c47-0692-477b-9483-f80218571763","Type":"ContainerStarted","Data":"00c63f3f5eaad125920a538a55daf9a0f5bf3b9e44de9185ff64889911dae128"} Dec 04 18:08:16 crc kubenswrapper[4631]: I1204 18:08:16.239244 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:08:16 crc kubenswrapper[4631]: E1204 18:08:16.239636 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:08:16 crc kubenswrapper[4631]: I1204 18:08:16.443002 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" event={"ID":"5bba5c47-0692-477b-9483-f80218571763","Type":"ContainerStarted","Data":"df103071459826e6f04a1a3ade524d345a21a1783b505c31572c68064a955227"} Dec 04 18:08:31 crc kubenswrapper[4631]: I1204 18:08:31.240061 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:08:31 crc kubenswrapper[4631]: E1204 18:08:31.241203 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:08:44 crc kubenswrapper[4631]: I1204 18:08:44.239878 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:08:44 crc kubenswrapper[4631]: E1204 
18:08:44.240544 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:08:59 crc kubenswrapper[4631]: I1204 18:08:59.239777 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:08:59 crc kubenswrapper[4631]: E1204 18:08:59.240546 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:09:08 crc kubenswrapper[4631]: I1204 18:09:08.863060 4631 generic.go:334] "Generic (PLEG): container finished" podID="5bba5c47-0692-477b-9483-f80218571763" containerID="df103071459826e6f04a1a3ade524d345a21a1783b505c31572c68064a955227" exitCode=0 Dec 04 18:09:08 crc kubenswrapper[4631]: I1204 18:09:08.863086 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" event={"ID":"5bba5c47-0692-477b-9483-f80218571763","Type":"ContainerDied","Data":"df103071459826e6f04a1a3ade524d345a21a1783b505c31572c68064a955227"} Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.246899 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:09:10 crc kubenswrapper[4631]: E1204 18:09:10.248433 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.355172 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.444760 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-neutron-ovn-metadata-agent-neutron-config-0\") pod \"5bba5c47-0692-477b-9483-f80218571763\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.445323 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-ssh-key\") pod \"5bba5c47-0692-477b-9483-f80218571763\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.445416 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8fnn2\" (UniqueName: \"kubernetes.io/projected/5bba5c47-0692-477b-9483-f80218571763-kube-api-access-8fnn2\") pod \"5bba5c47-0692-477b-9483-f80218571763\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.445450 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-nova-metadata-neutron-config-0\") pod \"5bba5c47-0692-477b-9483-f80218571763\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.445527 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-neutron-metadata-combined-ca-bundle\") pod \"5bba5c47-0692-477b-9483-f80218571763\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.445671 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-inventory\") pod \"5bba5c47-0692-477b-9483-f80218571763\" (UID: \"5bba5c47-0692-477b-9483-f80218571763\") " Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.454427 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "5bba5c47-0692-477b-9483-f80218571763" (UID: "5bba5c47-0692-477b-9483-f80218571763"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.461648 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bba5c47-0692-477b-9483-f80218571763-kube-api-access-8fnn2" (OuterVolumeSpecName: "kube-api-access-8fnn2") pod "5bba5c47-0692-477b-9483-f80218571763" (UID: "5bba5c47-0692-477b-9483-f80218571763"). InnerVolumeSpecName "kube-api-access-8fnn2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.479879 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "5bba5c47-0692-477b-9483-f80218571763" (UID: "5bba5c47-0692-477b-9483-f80218571763"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.480585 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-inventory" (OuterVolumeSpecName: "inventory") pod "5bba5c47-0692-477b-9483-f80218571763" (UID: "5bba5c47-0692-477b-9483-f80218571763"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.489673 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5bba5c47-0692-477b-9483-f80218571763" (UID: "5bba5c47-0692-477b-9483-f80218571763"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.499479 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "5bba5c47-0692-477b-9483-f80218571763" (UID: "5bba5c47-0692-477b-9483-f80218571763"). InnerVolumeSpecName "nova-metadata-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.548589 4631 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.548636 4631 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.548655 4631 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.548677 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8fnn2\" (UniqueName: \"kubernetes.io/projected/5bba5c47-0692-477b-9483-f80218571763-kube-api-access-8fnn2\") on node \"crc\" DevicePath \"\"" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.548692 4631 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.548702 4631 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bba5c47-0692-477b-9483-f80218571763-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.883344 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" event={"ID":"5bba5c47-0692-477b-9483-f80218571763","Type":"ContainerDied","Data":"00c63f3f5eaad125920a538a55daf9a0f5bf3b9e44de9185ff64889911dae128"} Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.883489 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="00c63f3f5eaad125920a538a55daf9a0f5bf3b9e44de9185ff64889911dae128" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.883422 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.982238 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8"] Dec 04 18:09:10 crc kubenswrapper[4631]: E1204 18:09:10.982633 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bba5c47-0692-477b-9483-f80218571763" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.982650 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bba5c47-0692-477b-9483-f80218571763" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.982838 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bba5c47-0692-477b-9483-f80218571763" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.983499 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.986759 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.987505 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-9284p" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.987903 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.988055 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.988209 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 18:09:10 crc kubenswrapper[4631]: I1204 18:09:10.996134 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8"] Dec 04 18:09:11 crc kubenswrapper[4631]: I1204 18:09:11.059021 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8\" (UID: \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8" Dec 04 18:09:11 crc kubenswrapper[4631]: I1204 18:09:11.059321 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwxrq\" (UniqueName: \"kubernetes.io/projected/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-kube-api-access-rwxrq\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8\" (UID: \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8" Dec 04 18:09:11 crc kubenswrapper[4631]: I1204 18:09:11.059457 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8\" (UID: \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8" Dec 04 18:09:11 crc kubenswrapper[4631]: I1204 18:09:11.059601 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8\" (UID: \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8" Dec 04 18:09:11 crc kubenswrapper[4631]: I1204 18:09:11.059707 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8\" (UID: \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8" Dec 04 18:09:11 crc kubenswrapper[4631]: I1204 18:09:11.160873 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"inventory\" (UniqueName: \"kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8\" (UID: \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8" Dec 04 18:09:11 crc kubenswrapper[4631]: I1204 18:09:11.160949 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8\" (UID: \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8" Dec 04 18:09:11 crc kubenswrapper[4631]: I1204 18:09:11.161028 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8\" (UID: \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8" Dec 04 18:09:11 crc kubenswrapper[4631]: I1204 18:09:11.161082 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwxrq\" (UniqueName: \"kubernetes.io/projected/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-kube-api-access-rwxrq\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8\" (UID: \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8" Dec 04 18:09:11 crc kubenswrapper[4631]: I1204 18:09:11.161122 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8\" (UID: \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8" Dec 04 18:09:11 crc kubenswrapper[4631]: I1204 18:09:11.164572 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8\" (UID: \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8" Dec 04 18:09:11 crc kubenswrapper[4631]: I1204 18:09:11.165168 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8\" (UID: \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8" Dec 04 18:09:11 crc kubenswrapper[4631]: I1204 18:09:11.166244 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8\" (UID: \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8" Dec 04 18:09:11 crc kubenswrapper[4631]: I1204 18:09:11.170729 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-inventory\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8\" (UID: \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8" Dec 04 18:09:11 crc kubenswrapper[4631]: I1204 18:09:11.186680 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwxrq\" (UniqueName: \"kubernetes.io/projected/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-kube-api-access-rwxrq\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8\" (UID: \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8" Dec 04 18:09:11 crc kubenswrapper[4631]: I1204 18:09:11.309137 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8" Dec 04 18:09:11 crc kubenswrapper[4631]: I1204 18:09:11.798252 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8"] Dec 04 18:09:11 crc kubenswrapper[4631]: I1204 18:09:11.892217 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8" event={"ID":"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec","Type":"ContainerStarted","Data":"9659af08cac86542a61ab481a025314a75074285356792202267ded21a099525"} Dec 04 18:09:12 crc kubenswrapper[4631]: I1204 18:09:12.904355 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8" event={"ID":"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec","Type":"ContainerStarted","Data":"15b964b2883fa9b1b94c4a70018f11a785aa292aaa517fc3bfbacf01d5151b84"} Dec 04 18:09:12 crc kubenswrapper[4631]: I1204 18:09:12.928515 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8" podStartSLOduration=2.578444062 podStartE2EDuration="2.928495173s" podCreationTimestamp="2025-12-04 18:09:10 +0000 UTC" firstStartedPulling="2025-12-04 18:09:11.797308221 +0000 UTC m=+2481.829550219" lastFinishedPulling="2025-12-04 18:09:12.147359332 +0000 UTC m=+2482.179601330" observedRunningTime="2025-12-04 18:09:12.925011282 +0000 UTC m=+2482.957253300" watchObservedRunningTime="2025-12-04 18:09:12.928495173 +0000 UTC m=+2482.960737191" Dec 04 18:09:25 crc kubenswrapper[4631]: I1204 18:09:25.239068 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:09:25 crc kubenswrapper[4631]: E1204 18:09:25.240072 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:09:38 crc kubenswrapper[4631]: I1204 18:09:38.240158 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:09:38 crc kubenswrapper[4631]: E1204 18:09:38.241549 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:09:53 crc kubenswrapper[4631]: I1204 18:09:53.239834 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:09:53 crc kubenswrapper[4631]: E1204 18:09:53.240576 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:10:04 crc kubenswrapper[4631]: I1204 18:10:04.240392 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:10:04 crc kubenswrapper[4631]: E1204 18:10:04.241005 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:10:16 crc kubenswrapper[4631]: I1204 18:10:16.239705 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6" Dec 04 18:10:16 crc kubenswrapper[4631]: I1204 18:10:16.470976 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerStarted","Data":"6cf6e79e6d18aca329f54327d5530379d0ca975dbed57eaeda5328e285791625"} Dec 04 18:12:36 crc kubenswrapper[4631]: I1204 18:12:36.023079 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:12:36 crc kubenswrapper[4631]: I1204 18:12:36.023811 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 18:12:54 crc kubenswrapper[4631]: I1204 18:12:54.769206 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dnw4r"] Dec 04 18:12:54 crc kubenswrapper[4631]: I1204 18:12:54.772262 4631 util.go:30] "No sandbox for pod can be found. 
Dec 04 18:12:54 crc kubenswrapper[4631]: I1204 18:12:54.791649 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dnw4r"]
Dec 04 18:12:54 crc kubenswrapper[4631]: I1204 18:12:54.899825 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/658a9a08-2561-4186-a486-c47a79c2019a-utilities\") pod \"certified-operators-dnw4r\" (UID: \"658a9a08-2561-4186-a486-c47a79c2019a\") " pod="openshift-marketplace/certified-operators-dnw4r"
Dec 04 18:12:54 crc kubenswrapper[4631]: I1204 18:12:54.899945 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxkl9\" (UniqueName: \"kubernetes.io/projected/658a9a08-2561-4186-a486-c47a79c2019a-kube-api-access-lxkl9\") pod \"certified-operators-dnw4r\" (UID: \"658a9a08-2561-4186-a486-c47a79c2019a\") " pod="openshift-marketplace/certified-operators-dnw4r"
Dec 04 18:12:54 crc kubenswrapper[4631]: I1204 18:12:54.900059 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/658a9a08-2561-4186-a486-c47a79c2019a-catalog-content\") pod \"certified-operators-dnw4r\" (UID: \"658a9a08-2561-4186-a486-c47a79c2019a\") " pod="openshift-marketplace/certified-operators-dnw4r"
Dec 04 18:12:55 crc kubenswrapper[4631]: I1204 18:12:55.001761 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxkl9\" (UniqueName: \"kubernetes.io/projected/658a9a08-2561-4186-a486-c47a79c2019a-kube-api-access-lxkl9\") pod \"certified-operators-dnw4r\" (UID: \"658a9a08-2561-4186-a486-c47a79c2019a\") " pod="openshift-marketplace/certified-operators-dnw4r"
Dec 04 18:12:55 crc kubenswrapper[4631]: I1204 18:12:55.001921 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/658a9a08-2561-4186-a486-c47a79c2019a-catalog-content\") pod \"certified-operators-dnw4r\" (UID: \"658a9a08-2561-4186-a486-c47a79c2019a\") " pod="openshift-marketplace/certified-operators-dnw4r"
Dec 04 18:12:55 crc kubenswrapper[4631]: I1204 18:12:55.001990 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/658a9a08-2561-4186-a486-c47a79c2019a-utilities\") pod \"certified-operators-dnw4r\" (UID: \"658a9a08-2561-4186-a486-c47a79c2019a\") " pod="openshift-marketplace/certified-operators-dnw4r"
Dec 04 18:12:55 crc kubenswrapper[4631]: I1204 18:12:55.002408 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/658a9a08-2561-4186-a486-c47a79c2019a-utilities\") pod \"certified-operators-dnw4r\" (UID: \"658a9a08-2561-4186-a486-c47a79c2019a\") " pod="openshift-marketplace/certified-operators-dnw4r"
Dec 04 18:12:55 crc kubenswrapper[4631]: I1204 18:12:55.002497 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/658a9a08-2561-4186-a486-c47a79c2019a-catalog-content\") pod \"certified-operators-dnw4r\" (UID: \"658a9a08-2561-4186-a486-c47a79c2019a\") " pod="openshift-marketplace/certified-operators-dnw4r"
Dec 04 18:12:55 crc kubenswrapper[4631]: I1204 18:12:55.027510 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxkl9\" (UniqueName: \"kubernetes.io/projected/658a9a08-2561-4186-a486-c47a79c2019a-kube-api-access-lxkl9\") pod \"certified-operators-dnw4r\" (UID: \"658a9a08-2561-4186-a486-c47a79c2019a\") " pod="openshift-marketplace/certified-operators-dnw4r"
Dec 04 18:12:55 crc kubenswrapper[4631]: I1204 18:12:55.103434 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dnw4r"
Dec 04 18:12:55 crc kubenswrapper[4631]: I1204 18:12:55.755224 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dnw4r"]
Dec 04 18:12:55 crc kubenswrapper[4631]: I1204 18:12:55.862564 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dnw4r" event={"ID":"658a9a08-2561-4186-a486-c47a79c2019a","Type":"ContainerStarted","Data":"ea41f0694d421882ffe12bb8091428aa78e1b087aceedb366377aa9000d6276c"}
Dec 04 18:12:56 crc kubenswrapper[4631]: I1204 18:12:56.877281 4631 generic.go:334] "Generic (PLEG): container finished" podID="658a9a08-2561-4186-a486-c47a79c2019a" containerID="cac4de6a7c2d39cb12c34b45a4ee82a8b360449c23a51aef0c3d67e7698ef330" exitCode=0
Dec 04 18:12:56 crc kubenswrapper[4631]: I1204 18:12:56.877409 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dnw4r" event={"ID":"658a9a08-2561-4186-a486-c47a79c2019a","Type":"ContainerDied","Data":"cac4de6a7c2d39cb12c34b45a4ee82a8b360449c23a51aef0c3d67e7698ef330"}
Dec 04 18:12:56 crc kubenswrapper[4631]: I1204 18:12:56.883751 4631 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 04 18:12:57 crc kubenswrapper[4631]: I1204 18:12:57.887198 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dnw4r" event={"ID":"658a9a08-2561-4186-a486-c47a79c2019a","Type":"ContainerStarted","Data":"9c8518657f48332d54a0f90530ea7d3fd8e4a66975dd05c408cc563580c9769a"}
Dec 04 18:12:59 crc kubenswrapper[4631]: I1204 18:12:59.911184 4631 generic.go:334] "Generic (PLEG): container finished" podID="658a9a08-2561-4186-a486-c47a79c2019a" containerID="9c8518657f48332d54a0f90530ea7d3fd8e4a66975dd05c408cc563580c9769a" exitCode=0
Dec 04 18:12:59 crc kubenswrapper[4631]: I1204 18:12:59.911545 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dnw4r" event={"ID":"658a9a08-2561-4186-a486-c47a79c2019a","Type":"ContainerDied","Data":"9c8518657f48332d54a0f90530ea7d3fd8e4a66975dd05c408cc563580c9769a"}
Dec 04 18:13:00 crc kubenswrapper[4631]: I1204 18:13:00.923087 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dnw4r" event={"ID":"658a9a08-2561-4186-a486-c47a79c2019a","Type":"ContainerStarted","Data":"080ec15e0ed0326aaa7eb6a53e232bc3c14b5616bd6651f9fcb716b811980f3d"}
Dec 04 18:13:00 crc kubenswrapper[4631]: I1204 18:13:00.950156 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dnw4r" podStartSLOduration=3.551889483 podStartE2EDuration="6.950136379s" podCreationTimestamp="2025-12-04 18:12:54 +0000 UTC" firstStartedPulling="2025-12-04 18:12:56.883543299 +0000 UTC m=+2706.915785297" lastFinishedPulling="2025-12-04 18:13:00.281790205 +0000 UTC m=+2710.314032193" observedRunningTime="2025-12-04 18:13:00.949094219 +0000 UTC m=+2710.981336217" watchObservedRunningTime="2025-12-04 18:13:00.950136379 +0000 UTC m=+2710.982378387"
Dec 04 18:13:05 crc kubenswrapper[4631]: I1204 18:13:05.104063 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dnw4r"
Dec 04 18:13:05 crc kubenswrapper[4631]: I1204 18:13:05.105545 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dnw4r"
Dec 04 18:13:05 crc kubenswrapper[4631]: I1204 18:13:05.150448 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dnw4r"
Dec 04 18:13:06 crc kubenswrapper[4631]: I1204 18:13:06.015696 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dnw4r"
Dec 04 18:13:06 crc kubenswrapper[4631]: I1204 18:13:06.022867 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 18:13:06 crc kubenswrapper[4631]: I1204 18:13:06.022908 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 18:13:06 crc kubenswrapper[4631]: I1204 18:13:06.064802 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dnw4r"]
Dec 04 18:13:07 crc kubenswrapper[4631]: I1204 18:13:07.974533 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dnw4r" podUID="658a9a08-2561-4186-a486-c47a79c2019a" containerName="registry-server" containerID="cri-o://080ec15e0ed0326aaa7eb6a53e232bc3c14b5616bd6651f9fcb716b811980f3d" gracePeriod=2
Dec 04 18:13:08 crc kubenswrapper[4631]: I1204 18:13:08.468101 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dnw4r"
Dec 04 18:13:08 crc kubenswrapper[4631]: I1204 18:13:08.536061 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/658a9a08-2561-4186-a486-c47a79c2019a-catalog-content\") pod \"658a9a08-2561-4186-a486-c47a79c2019a\" (UID: \"658a9a08-2561-4186-a486-c47a79c2019a\") "
Dec 04 18:13:08 crc kubenswrapper[4631]: I1204 18:13:08.536138 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/658a9a08-2561-4186-a486-c47a79c2019a-utilities\") pod \"658a9a08-2561-4186-a486-c47a79c2019a\" (UID: \"658a9a08-2561-4186-a486-c47a79c2019a\") "
Dec 04 18:13:08 crc kubenswrapper[4631]: I1204 18:13:08.536186 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxkl9\" (UniqueName: \"kubernetes.io/projected/658a9a08-2561-4186-a486-c47a79c2019a-kube-api-access-lxkl9\") pod \"658a9a08-2561-4186-a486-c47a79c2019a\" (UID: \"658a9a08-2561-4186-a486-c47a79c2019a\") "
Dec 04 18:13:08 crc kubenswrapper[4631]: I1204 18:13:08.537218 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/658a9a08-2561-4186-a486-c47a79c2019a-utilities" (OuterVolumeSpecName: "utilities") pod "658a9a08-2561-4186-a486-c47a79c2019a" (UID: "658a9a08-2561-4186-a486-c47a79c2019a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 18:13:08 crc kubenswrapper[4631]: I1204 18:13:08.541172 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/658a9a08-2561-4186-a486-c47a79c2019a-kube-api-access-lxkl9" (OuterVolumeSpecName: "kube-api-access-lxkl9") pod "658a9a08-2561-4186-a486-c47a79c2019a" (UID: "658a9a08-2561-4186-a486-c47a79c2019a"). InnerVolumeSpecName "kube-api-access-lxkl9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 18:13:08 crc kubenswrapper[4631]: I1204 18:13:08.584661 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/658a9a08-2561-4186-a486-c47a79c2019a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "658a9a08-2561-4186-a486-c47a79c2019a" (UID: "658a9a08-2561-4186-a486-c47a79c2019a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 18:13:08 crc kubenswrapper[4631]: I1204 18:13:08.638744 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/658a9a08-2561-4186-a486-c47a79c2019a-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 04 18:13:08 crc kubenswrapper[4631]: I1204 18:13:08.639039 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/658a9a08-2561-4186-a486-c47a79c2019a-utilities\") on node \"crc\" DevicePath \"\""
Dec 04 18:13:08 crc kubenswrapper[4631]: I1204 18:13:08.639049 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxkl9\" (UniqueName: \"kubernetes.io/projected/658a9a08-2561-4186-a486-c47a79c2019a-kube-api-access-lxkl9\") on node \"crc\" DevicePath \"\""
Dec 04 18:13:08 crc kubenswrapper[4631]: I1204 18:13:08.985134 4631 generic.go:334] "Generic (PLEG): container finished" podID="658a9a08-2561-4186-a486-c47a79c2019a" containerID="080ec15e0ed0326aaa7eb6a53e232bc3c14b5616bd6651f9fcb716b811980f3d" exitCode=0
Dec 04 18:13:08 crc kubenswrapper[4631]: I1204 18:13:08.985181 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dnw4r" event={"ID":"658a9a08-2561-4186-a486-c47a79c2019a","Type":"ContainerDied","Data":"080ec15e0ed0326aaa7eb6a53e232bc3c14b5616bd6651f9fcb716b811980f3d"}
Dec 04 18:13:08 crc kubenswrapper[4631]: I1204 18:13:08.985191 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dnw4r"
Dec 04 18:13:08 crc kubenswrapper[4631]: I1204 18:13:08.985211 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dnw4r" event={"ID":"658a9a08-2561-4186-a486-c47a79c2019a","Type":"ContainerDied","Data":"ea41f0694d421882ffe12bb8091428aa78e1b087aceedb366377aa9000d6276c"}
Dec 04 18:13:08 crc kubenswrapper[4631]: I1204 18:13:08.985230 4631 scope.go:117] "RemoveContainer" containerID="080ec15e0ed0326aaa7eb6a53e232bc3c14b5616bd6651f9fcb716b811980f3d"
Dec 04 18:13:09 crc kubenswrapper[4631]: I1204 18:13:09.021234 4631 scope.go:117] "RemoveContainer" containerID="9c8518657f48332d54a0f90530ea7d3fd8e4a66975dd05c408cc563580c9769a"
Dec 04 18:13:09 crc kubenswrapper[4631]: I1204 18:13:09.022847 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dnw4r"]
Dec 04 18:13:09 crc kubenswrapper[4631]: I1204 18:13:09.032417 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dnw4r"]
Dec 04 18:13:09 crc kubenswrapper[4631]: I1204 18:13:09.043539 4631 scope.go:117] "RemoveContainer" containerID="cac4de6a7c2d39cb12c34b45a4ee82a8b360449c23a51aef0c3d67e7698ef330"
Dec 04 18:13:09 crc kubenswrapper[4631]: I1204 18:13:09.081339 4631 scope.go:117] "RemoveContainer" containerID="080ec15e0ed0326aaa7eb6a53e232bc3c14b5616bd6651f9fcb716b811980f3d"
Dec 04 18:13:09 crc kubenswrapper[4631]: E1204 18:13:09.081718 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"080ec15e0ed0326aaa7eb6a53e232bc3c14b5616bd6651f9fcb716b811980f3d\": container with ID starting with 080ec15e0ed0326aaa7eb6a53e232bc3c14b5616bd6651f9fcb716b811980f3d not found: ID does not exist" containerID="080ec15e0ed0326aaa7eb6a53e232bc3c14b5616bd6651f9fcb716b811980f3d"
Dec 04 18:13:09 crc kubenswrapper[4631]: I1204 18:13:09.081754 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"080ec15e0ed0326aaa7eb6a53e232bc3c14b5616bd6651f9fcb716b811980f3d"} err="failed to get container status \"080ec15e0ed0326aaa7eb6a53e232bc3c14b5616bd6651f9fcb716b811980f3d\": rpc error: code = NotFound desc = could not find container \"080ec15e0ed0326aaa7eb6a53e232bc3c14b5616bd6651f9fcb716b811980f3d\": container with ID starting with 080ec15e0ed0326aaa7eb6a53e232bc3c14b5616bd6651f9fcb716b811980f3d not found: ID does not exist"
Dec 04 18:13:09 crc kubenswrapper[4631]: I1204 18:13:09.081777 4631 scope.go:117] "RemoveContainer" containerID="9c8518657f48332d54a0f90530ea7d3fd8e4a66975dd05c408cc563580c9769a"
Dec 04 18:13:09 crc kubenswrapper[4631]: E1204 18:13:09.082012 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c8518657f48332d54a0f90530ea7d3fd8e4a66975dd05c408cc563580c9769a\": container with ID starting with 9c8518657f48332d54a0f90530ea7d3fd8e4a66975dd05c408cc563580c9769a not found: ID does not exist" containerID="9c8518657f48332d54a0f90530ea7d3fd8e4a66975dd05c408cc563580c9769a"
Dec 04 18:13:09 crc kubenswrapper[4631]: I1204 18:13:09.082037 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c8518657f48332d54a0f90530ea7d3fd8e4a66975dd05c408cc563580c9769a"} err="failed to get container status \"9c8518657f48332d54a0f90530ea7d3fd8e4a66975dd05c408cc563580c9769a\": rpc error: code = NotFound desc = could not find container \"9c8518657f48332d54a0f90530ea7d3fd8e4a66975dd05c408cc563580c9769a\": container with ID starting with 9c8518657f48332d54a0f90530ea7d3fd8e4a66975dd05c408cc563580c9769a not found: ID does not exist"
Dec 04 18:13:09 crc kubenswrapper[4631]: I1204 18:13:09.082054 4631 scope.go:117] "RemoveContainer" containerID="cac4de6a7c2d39cb12c34b45a4ee82a8b360449c23a51aef0c3d67e7698ef330"
Dec 04 18:13:09 crc kubenswrapper[4631]: E1204 18:13:09.082255 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cac4de6a7c2d39cb12c34b45a4ee82a8b360449c23a51aef0c3d67e7698ef330\": container with ID starting with cac4de6a7c2d39cb12c34b45a4ee82a8b360449c23a51aef0c3d67e7698ef330 not found: ID does not exist" containerID="cac4de6a7c2d39cb12c34b45a4ee82a8b360449c23a51aef0c3d67e7698ef330"
Dec 04 18:13:09 crc kubenswrapper[4631]: I1204 18:13:09.082276 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cac4de6a7c2d39cb12c34b45a4ee82a8b360449c23a51aef0c3d67e7698ef330"} err="failed to get container status \"cac4de6a7c2d39cb12c34b45a4ee82a8b360449c23a51aef0c3d67e7698ef330\": rpc error: code = NotFound desc = could not find container \"cac4de6a7c2d39cb12c34b45a4ee82a8b360449c23a51aef0c3d67e7698ef330\": container with ID starting with cac4de6a7c2d39cb12c34b45a4ee82a8b360449c23a51aef0c3d67e7698ef330 not found: ID does not exist"
Dec 04 18:13:10 crc kubenswrapper[4631]: I1204 18:13:10.248528 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="658a9a08-2561-4186-a486-c47a79c2019a" path="/var/lib/kubelet/pods/658a9a08-2561-4186-a486-c47a79c2019a/volumes"
Dec 04 18:13:36 crc kubenswrapper[4631]: I1204 18:13:36.023061 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 18:13:36 crc kubenswrapper[4631]: I1204 18:13:36.023670 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 18:13:36 crc kubenswrapper[4631]: I1204 18:13:36.023722 4631 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q27wh"
Dec 04 18:13:36 crc kubenswrapper[4631]: I1204 18:13:36.024500 4631 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6cf6e79e6d18aca329f54327d5530379d0ca975dbed57eaeda5328e285791625"} pod="openshift-machine-config-operator/machine-config-daemon-q27wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 04 18:13:36 crc kubenswrapper[4631]: I1204 18:13:36.024553 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" containerID="cri-o://6cf6e79e6d18aca329f54327d5530379d0ca975dbed57eaeda5328e285791625" gracePeriod=600
Dec 04 18:13:36 crc kubenswrapper[4631]: I1204 18:13:36.615633 4631 generic.go:334] "Generic (PLEG): container finished" podID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerID="6cf6e79e6d18aca329f54327d5530379d0ca975dbed57eaeda5328e285791625" exitCode=0
Dec 04 18:13:36 crc kubenswrapper[4631]: I1204 18:13:36.615687 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerDied","Data":"6cf6e79e6d18aca329f54327d5530379d0ca975dbed57eaeda5328e285791625"}
Dec 04 18:13:36 crc kubenswrapper[4631]: I1204 18:13:36.616582 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerStarted","Data":"5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1"}
Dec 04 18:13:36 crc kubenswrapper[4631]: I1204 18:13:36.616622 4631 scope.go:117] "RemoveContainer" containerID="f46c25180b125b39a595a2ebf659f8a9e73ab133bdc53e100ed05e0138c805a6"
Dec 04 18:13:46 crc kubenswrapper[4631]: I1204 18:13:46.719628 4631 generic.go:334] "Generic (PLEG): container finished" podID="d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec" containerID="15b964b2883fa9b1b94c4a70018f11a785aa292aaa517fc3bfbacf01d5151b84" exitCode=0
Dec 04 18:13:46 crc kubenswrapper[4631]: I1204 18:13:46.719890 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8" event={"ID":"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec","Type":"ContainerDied","Data":"15b964b2883fa9b1b94c4a70018f11a785aa292aaa517fc3bfbacf01d5151b84"}
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.170063 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.333910 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-inventory\") pod \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\" (UID: \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\") "
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.334205 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-libvirt-combined-ca-bundle\") pod \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\" (UID: \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\") "
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.334329 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-ssh-key\") pod \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\" (UID: \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\") "
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.334426 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-libvirt-secret-0\") pod \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\" (UID: \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\") "
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.334457 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwxrq\" (UniqueName: \"kubernetes.io/projected/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-kube-api-access-rwxrq\") pod \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\" (UID: \"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec\") "
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.339694 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-kube-api-access-rwxrq" (OuterVolumeSpecName: "kube-api-access-rwxrq") pod "d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec" (UID: "d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec"). InnerVolumeSpecName "kube-api-access-rwxrq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.340171 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec" (UID: "d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.359676 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec" (UID: "d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.366062 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec" (UID: "d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.369117 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-inventory" (OuterVolumeSpecName: "inventory") pod "d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec" (UID: "d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.437095 4631 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-libvirt-secret-0\") on node \"crc\" DevicePath \"\""
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.437135 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwxrq\" (UniqueName: \"kubernetes.io/projected/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-kube-api-access-rwxrq\") on node \"crc\" DevicePath \"\""
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.437150 4631 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-inventory\") on node \"crc\" DevicePath \"\""
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.437165 4631 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.437179 4631 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.737758 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8" event={"ID":"d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec","Type":"ContainerDied","Data":"9659af08cac86542a61ab481a025314a75074285356792202267ded21a099525"}
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.738334 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9659af08cac86542a61ab481a025314a75074285356792202267ded21a099525"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.737813 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.834153 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"]
Dec 04 18:13:48 crc kubenswrapper[4631]: E1204 18:13:48.834615 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="658a9a08-2561-4186-a486-c47a79c2019a" containerName="extract-content"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.834642 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="658a9a08-2561-4186-a486-c47a79c2019a" containerName="extract-content"
Dec 04 18:13:48 crc kubenswrapper[4631]: E1204 18:13:48.834661 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="658a9a08-2561-4186-a486-c47a79c2019a" containerName="extract-utilities"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.834669 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="658a9a08-2561-4186-a486-c47a79c2019a" containerName="extract-utilities"
Dec 04 18:13:48 crc kubenswrapper[4631]: E1204 18:13:48.834712 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.834721 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Dec 04 18:13:48 crc kubenswrapper[4631]: E1204 18:13:48.834737 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="658a9a08-2561-4186-a486-c47a79c2019a" containerName="registry-server"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.834744 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="658a9a08-2561-4186-a486-c47a79c2019a" containerName="registry-server"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.834963 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.834995 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="658a9a08-2561-4186-a486-c47a79c2019a" containerName="registry-server"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.835712 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.838622 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.845092 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.845408 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.845606 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.845639 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.845680 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.851432 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-9284p"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.860007 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"]
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.948358 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.948486 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.948521 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.948552 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.948611 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpnfm\" (UniqueName: \"kubernetes.io/projected/93cd2870-edd3-4b7f-9868-6c437dcf3164-kube-api-access-lpnfm\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.948640 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.948667 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.948706 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:48 crc kubenswrapper[4631]: I1204 18:13:48.948742 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:49 crc kubenswrapper[4631]: I1204 18:13:49.050949 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:49 crc kubenswrapper[4631]: I1204 18:13:49.051045 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:49 crc kubenswrapper[4631]: I1204 18:13:49.051135 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpnfm\" (UniqueName: \"kubernetes.io/projected/93cd2870-edd3-4b7f-9868-6c437dcf3164-kube-api-access-lpnfm\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:49 crc kubenswrapper[4631]: I1204 18:13:49.051874 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:49 crc kubenswrapper[4631]: I1204 18:13:49.051906 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:49 crc kubenswrapper[4631]: I1204 18:13:49.051962 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:49 crc kubenswrapper[4631]: I1204 18:13:49.052002 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:49 crc kubenswrapper[4631]: I1204 18:13:49.052027 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:49 crc kubenswrapper[4631]: I1204 18:13:49.052087 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:49 crc kubenswrapper[4631]: I1204 18:13:49.053156 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:49 crc kubenswrapper[4631]: I1204 18:13:49.055551 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:49 crc kubenswrapper[4631]: I1204 18:13:49.057117 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:49 crc kubenswrapper[4631]: I1204 18:13:49.057241 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:49 crc kubenswrapper[4631]: I1204 18:13:49.058062 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:49 crc kubenswrapper[4631]: I1204 18:13:49.059016 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:49 crc kubenswrapper[4631]: I1204 18:13:49.060079 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:49 crc kubenswrapper[4631]: I1204 18:13:49.066639 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:49 crc kubenswrapper[4631]: I1204 18:13:49.084924 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpnfm\" (UniqueName: \"kubernetes.io/projected/93cd2870-edd3-4b7f-9868-6c437dcf3164-kube-api-access-lpnfm\") pod \"nova-edpm-deployment-openstack-edpm-ipam-lqlcs\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:49 crc kubenswrapper[4631]: I1204 18:13:49.155402 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"
Dec 04 18:13:49 crc kubenswrapper[4631]: I1204 18:13:49.944632 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs"]
Dec 04 18:13:50 crc kubenswrapper[4631]: I1204 18:13:50.757092 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs" event={"ID":"93cd2870-edd3-4b7f-9868-6c437dcf3164","Type":"ContainerStarted","Data":"e49c15333fd564d25247cd14a1bcc83407b6fd5e30b56b83cf4aa808c1d41b18"}
Dec 04 18:13:50 crc kubenswrapper[4631]: I1204 18:13:50.757507 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs" event={"ID":"93cd2870-edd3-4b7f-9868-6c437dcf3164","Type":"ContainerStarted","Data":"74b9eb10b728a974e624fcc612e362bb68c76f126f1687da9a3f56478b860ebe"}
Dec 04 18:13:50 crc kubenswrapper[4631]: I1204 18:13:50.786113 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs" podStartSLOduration=2.598307821 podStartE2EDuration="2.78609359s" podCreationTimestamp="2025-12-04 18:13:48 +0000 UTC" firstStartedPulling="2025-12-04 18:13:49.950280474 +0000 UTC m=+2759.982522472" lastFinishedPulling="2025-12-04 18:13:50.138066243 +0000 UTC m=+2760.170308241" observedRunningTime="2025-12-04 18:13:50.776255389 +0000 UTC m=+2760.808497387" watchObservedRunningTime="2025-12-04 18:13:50.78609359 +0000 UTC m=+2760.818335588"
Dec 04 18:15:00 crc kubenswrapper[4631]: I1204 18:15:00.144846 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414535-fjph9"]
Dec 04 18:15:00 crc kubenswrapper[4631]: I1204 18:15:00.147010 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414535-fjph9"
Dec 04 18:15:00 crc kubenswrapper[4631]: I1204 18:15:00.148739 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 04 18:15:00 crc kubenswrapper[4631]: I1204 18:15:00.149052 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 04 18:15:00 crc kubenswrapper[4631]: I1204 18:15:00.155555 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ljck\" (UniqueName: \"kubernetes.io/projected/d7e19c3d-d09f-44f8-9f35-8f30eb34904e-kube-api-access-7ljck\") pod \"collect-profiles-29414535-fjph9\" (UID: \"d7e19c3d-d09f-44f8-9f35-8f30eb34904e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414535-fjph9"
Dec 04 18:15:00 crc kubenswrapper[4631]: I1204 18:15:00.155613 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d7e19c3d-d09f-44f8-9f35-8f30eb34904e-config-volume\") pod \"collect-profiles-29414535-fjph9\" (UID: \"d7e19c3d-d09f-44f8-9f35-8f30eb34904e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414535-fjph9"
Dec 04 18:15:00 crc kubenswrapper[4631]: I1204 18:15:00.155636 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d7e19c3d-d09f-44f8-9f35-8f30eb34904e-secret-volume\") pod \"collect-profiles-29414535-fjph9\" (UID: \"d7e19c3d-d09f-44f8-9f35-8f30eb34904e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414535-fjph9"
Dec 04 18:15:00 crc kubenswrapper[4631]: I1204 18:15:00.159512 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414535-fjph9"]
Dec 04 18:15:00 crc kubenswrapper[4631]: I1204 18:15:00.256519 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ljck\" (UniqueName: \"kubernetes.io/projected/d7e19c3d-d09f-44f8-9f35-8f30eb34904e-kube-api-access-7ljck\") pod \"collect-profiles-29414535-fjph9\" (UID: \"d7e19c3d-d09f-44f8-9f35-8f30eb34904e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414535-fjph9"
Dec 04 18:15:00 crc kubenswrapper[4631]: I1204 18:15:00.256573 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d7e19c3d-d09f-44f8-9f35-8f30eb34904e-config-volume\") pod \"collect-profiles-29414535-fjph9\" (UID: \"d7e19c3d-d09f-44f8-9f35-8f30eb34904e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414535-fjph9"
Dec 04 18:15:00 crc kubenswrapper[4631]: I1204 18:15:00.256597 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d7e19c3d-d09f-44f8-9f35-8f30eb34904e-secret-volume\") pod \"collect-profiles-29414535-fjph9\" (UID: \"d7e19c3d-d09f-44f8-9f35-8f30eb34904e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414535-fjph9"
Dec 04 18:15:00 crc kubenswrapper[4631]: I1204 18:15:00.257788 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d7e19c3d-d09f-44f8-9f35-8f30eb34904e-config-volume\") pod \"collect-profiles-29414535-fjph9\" (UID: \"d7e19c3d-d09f-44f8-9f35-8f30eb34904e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414535-fjph9"
Dec 04 18:15:00 crc kubenswrapper[4631]: I1204 18:15:00.273581 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d7e19c3d-d09f-44f8-9f35-8f30eb34904e-secret-volume\") pod \"collect-profiles-29414535-fjph9\" (UID: \"d7e19c3d-d09f-44f8-9f35-8f30eb34904e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414535-fjph9"
Dec 04 18:15:00 crc kubenswrapper[4631]: I1204 18:15:00.277387 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ljck\" (UniqueName: \"kubernetes.io/projected/d7e19c3d-d09f-44f8-9f35-8f30eb34904e-kube-api-access-7ljck\") pod \"collect-profiles-29414535-fjph9\" (UID: \"d7e19c3d-d09f-44f8-9f35-8f30eb34904e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414535-fjph9"
Dec 04 18:15:00 crc kubenswrapper[4631]: I1204 18:15:00.518064 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414535-fjph9"
Dec 04 18:15:00 crc kubenswrapper[4631]: I1204 18:15:00.943764 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414535-fjph9"]
Dec 04 18:15:01 crc kubenswrapper[4631]: I1204 18:15:01.771784 4631 generic.go:334] "Generic (PLEG): container finished" podID="d7e19c3d-d09f-44f8-9f35-8f30eb34904e" containerID="510ef3a11cb27425c86a50c66f257e70cc99033c042c1e9fefa2128ab5419c25" exitCode=0
Dec 04 18:15:01 crc kubenswrapper[4631]: I1204 18:15:01.771997 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414535-fjph9" event={"ID":"d7e19c3d-d09f-44f8-9f35-8f30eb34904e","Type":"ContainerDied","Data":"510ef3a11cb27425c86a50c66f257e70cc99033c042c1e9fefa2128ab5419c25"}
Dec 04 18:15:01 crc kubenswrapper[4631]: I1204 18:15:01.772243 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414535-fjph9" event={"ID":"d7e19c3d-d09f-44f8-9f35-8f30eb34904e","Type":"ContainerStarted","Data":"35c99054ea135cce6fb4cd88931008f7e2d52e762dd5b56c3bd72053dea5360a"}
Dec 04 18:15:03 crc kubenswrapper[4631]: I1204 18:15:03.126581 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414535-fjph9"
Dec 04 18:15:03 crc kubenswrapper[4631]: I1204 18:15:03.211559 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ljck\" (UniqueName: \"kubernetes.io/projected/d7e19c3d-d09f-44f8-9f35-8f30eb34904e-kube-api-access-7ljck\") pod \"d7e19c3d-d09f-44f8-9f35-8f30eb34904e\" (UID: \"d7e19c3d-d09f-44f8-9f35-8f30eb34904e\") "
Dec 04 18:15:03 crc kubenswrapper[4631]: I1204 18:15:03.212444 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d7e19c3d-d09f-44f8-9f35-8f30eb34904e-config-volume\") pod \"d7e19c3d-d09f-44f8-9f35-8f30eb34904e\" (UID: \"d7e19c3d-d09f-44f8-9f35-8f30eb34904e\") "
Dec 04 18:15:03 crc kubenswrapper[4631]: I1204 18:15:03.212513 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d7e19c3d-d09f-44f8-9f35-8f30eb34904e-secret-volume\") pod \"d7e19c3d-d09f-44f8-9f35-8f30eb34904e\" (UID: \"d7e19c3d-d09f-44f8-9f35-8f30eb34904e\") "
Dec 04 18:15:03 crc kubenswrapper[4631]: I1204 18:15:03.218211 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7e19c3d-d09f-44f8-9f35-8f30eb34904e-config-volume" (OuterVolumeSpecName: "config-volume") pod "d7e19c3d-d09f-44f8-9f35-8f30eb34904e" (UID: "d7e19c3d-d09f-44f8-9f35-8f30eb34904e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 04 18:15:03 crc kubenswrapper[4631]: I1204 18:15:03.230987 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7e19c3d-d09f-44f8-9f35-8f30eb34904e-kube-api-access-7ljck" (OuterVolumeSpecName: "kube-api-access-7ljck") pod "d7e19c3d-d09f-44f8-9f35-8f30eb34904e" (UID: "d7e19c3d-d09f-44f8-9f35-8f30eb34904e"). InnerVolumeSpecName "kube-api-access-7ljck". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 18:15:03 crc kubenswrapper[4631]: I1204 18:15:03.231106 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7e19c3d-d09f-44f8-9f35-8f30eb34904e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d7e19c3d-d09f-44f8-9f35-8f30eb34904e" (UID: "d7e19c3d-d09f-44f8-9f35-8f30eb34904e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 18:15:03 crc kubenswrapper[4631]: I1204 18:15:03.316839 4631 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d7e19c3d-d09f-44f8-9f35-8f30eb34904e-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 04 18:15:03 crc kubenswrapper[4631]: I1204 18:15:03.316882 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ljck\" (UniqueName: \"kubernetes.io/projected/d7e19c3d-d09f-44f8-9f35-8f30eb34904e-kube-api-access-7ljck\") on node \"crc\" DevicePath \"\""
Dec 04 18:15:03 crc kubenswrapper[4631]: I1204 18:15:03.316894 4631 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d7e19c3d-d09f-44f8-9f35-8f30eb34904e-config-volume\") on node \"crc\" DevicePath \"\""
Dec 04 18:15:03 crc kubenswrapper[4631]: I1204 18:15:03.789142 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414535-fjph9" event={"ID":"d7e19c3d-d09f-44f8-9f35-8f30eb34904e","Type":"ContainerDied","Data":"35c99054ea135cce6fb4cd88931008f7e2d52e762dd5b56c3bd72053dea5360a"}
Dec 04 18:15:03 crc kubenswrapper[4631]: I1204 18:15:03.789181 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="35c99054ea135cce6fb4cd88931008f7e2d52e762dd5b56c3bd72053dea5360a"
Dec 04 18:15:03 crc kubenswrapper[4631]: I1204 18:15:03.789231 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414535-fjph9"
Dec 04 18:15:04 crc kubenswrapper[4631]: I1204 18:15:04.209267 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs"]
Dec 04 18:15:04 crc kubenswrapper[4631]: I1204 18:15:04.216615 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414490-4s8gs"]
Dec 04 18:15:04 crc kubenswrapper[4631]: I1204 18:15:04.249730 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4212359b-081e-4b11-8ca7-87cb9ff33a1c" path="/var/lib/kubelet/pods/4212359b-081e-4b11-8ca7-87cb9ff33a1c/volumes"
Dec 04 18:15:23 crc kubenswrapper[4631]: I1204 18:15:23.463181 4631 scope.go:117] "RemoveContainer" containerID="72e4cb61ae27c39b1fe5d633cd183b6c90f27c95b6163743193fa3f2cf45de48"
Dec 04 18:15:36 crc kubenswrapper[4631]: I1204 18:15:36.023059 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 18:15:36 crc kubenswrapper[4631]: I1204 18:15:36.023690 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 18:15:42 crc kubenswrapper[4631]: I1204 18:15:42.439502 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5d6vz"]
Dec 04 18:15:42 crc kubenswrapper[4631]: E1204 18:15:42.440539 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7e19c3d-d09f-44f8-9f35-8f30eb34904e" containerName="collect-profiles"
Dec 04 18:15:42 crc kubenswrapper[4631]: I1204 18:15:42.440557 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7e19c3d-d09f-44f8-9f35-8f30eb34904e" containerName="collect-profiles"
Dec 04 18:15:42 crc kubenswrapper[4631]: I1204 18:15:42.440839 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7e19c3d-d09f-44f8-9f35-8f30eb34904e" containerName="collect-profiles"
Dec 04 18:15:42 crc kubenswrapper[4631]: I1204 18:15:42.442532 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5d6vz"
Dec 04 18:15:42 crc kubenswrapper[4631]: I1204 18:15:42.461133 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4lnlx\" (UniqueName: \"kubernetes.io/projected/15b36d30-2c6d-4df7-ae40-1e6c440e7438-kube-api-access-4lnlx\") pod \"redhat-marketplace-5d6vz\" (UID: \"15b36d30-2c6d-4df7-ae40-1e6c440e7438\") " pod="openshift-marketplace/redhat-marketplace-5d6vz"
Dec 04 18:15:42 crc kubenswrapper[4631]: I1204 18:15:42.461406 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15b36d30-2c6d-4df7-ae40-1e6c440e7438-utilities\") pod \"redhat-marketplace-5d6vz\" (UID: \"15b36d30-2c6d-4df7-ae40-1e6c440e7438\") " pod="openshift-marketplace/redhat-marketplace-5d6vz"
Dec 04 18:15:42 crc kubenswrapper[4631]: I1204 18:15:42.461676 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15b36d30-2c6d-4df7-ae40-1e6c440e7438-catalog-content\") pod \"redhat-marketplace-5d6vz\" (UID: \"15b36d30-2c6d-4df7-ae40-1e6c440e7438\") " pod="openshift-marketplace/redhat-marketplace-5d6vz"
Dec 04 18:15:42 crc kubenswrapper[4631]: I1204 18:15:42.470672 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5d6vz"]
Dec 04 18:15:42 crc kubenswrapper[4631]: I1204 18:15:42.563661 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15b36d30-2c6d-4df7-ae40-1e6c440e7438-catalog-content\") pod \"redhat-marketplace-5d6vz\" (UID: \"15b36d30-2c6d-4df7-ae40-1e6c440e7438\") " pod="openshift-marketplace/redhat-marketplace-5d6vz"
Dec 04 18:15:42 crc kubenswrapper[4631]: I1204 18:15:42.563830 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15b36d30-2c6d-4df7-ae40-1e6c440e7438-utilities\") pod \"redhat-marketplace-5d6vz\" (UID: \"15b36d30-2c6d-4df7-ae40-1e6c440e7438\") " pod="openshift-marketplace/redhat-marketplace-5d6vz"
Dec 04 18:15:42 crc kubenswrapper[4631]: I1204 18:15:42.563855 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4lnlx\" (UniqueName: \"kubernetes.io/projected/15b36d30-2c6d-4df7-ae40-1e6c440e7438-kube-api-access-4lnlx\") pod \"redhat-marketplace-5d6vz\" (UID: \"15b36d30-2c6d-4df7-ae40-1e6c440e7438\") " pod="openshift-marketplace/redhat-marketplace-5d6vz"
Dec 04 18:15:42 crc kubenswrapper[4631]: I1204 18:15:42.564193 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15b36d30-2c6d-4df7-ae40-1e6c440e7438-catalog-content\") pod \"redhat-marketplace-5d6vz\" (UID: \"15b36d30-2c6d-4df7-ae40-1e6c440e7438\") " 
pod="openshift-marketplace/redhat-marketplace-5d6vz" Dec 04 18:15:42 crc kubenswrapper[4631]: I1204 18:15:42.564273 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15b36d30-2c6d-4df7-ae40-1e6c440e7438-utilities\") pod \"redhat-marketplace-5d6vz\" (UID: \"15b36d30-2c6d-4df7-ae40-1e6c440e7438\") " pod="openshift-marketplace/redhat-marketplace-5d6vz" Dec 04 18:15:42 crc kubenswrapper[4631]: I1204 18:15:42.587455 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4lnlx\" (UniqueName: \"kubernetes.io/projected/15b36d30-2c6d-4df7-ae40-1e6c440e7438-kube-api-access-4lnlx\") pod \"redhat-marketplace-5d6vz\" (UID: \"15b36d30-2c6d-4df7-ae40-1e6c440e7438\") " pod="openshift-marketplace/redhat-marketplace-5d6vz" Dec 04 18:15:42 crc kubenswrapper[4631]: I1204 18:15:42.771919 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5d6vz" Dec 04 18:15:43 crc kubenswrapper[4631]: I1204 18:15:43.427345 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5d6vz"] Dec 04 18:15:44 crc kubenswrapper[4631]: I1204 18:15:44.180456 4631 generic.go:334] "Generic (PLEG): container finished" podID="15b36d30-2c6d-4df7-ae40-1e6c440e7438" containerID="3cdae97fc6a75a2c6d8cd2ec9f5502fad91f0d81b82bec3c51c8cce0004e8754" exitCode=0 Dec 04 18:15:44 crc kubenswrapper[4631]: I1204 18:15:44.180521 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5d6vz" event={"ID":"15b36d30-2c6d-4df7-ae40-1e6c440e7438","Type":"ContainerDied","Data":"3cdae97fc6a75a2c6d8cd2ec9f5502fad91f0d81b82bec3c51c8cce0004e8754"} Dec 04 18:15:44 crc kubenswrapper[4631]: I1204 18:15:44.180682 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5d6vz" event={"ID":"15b36d30-2c6d-4df7-ae40-1e6c440e7438","Type":"ContainerStarted","Data":"1427f5f0d1d532c3da66efed60d5c58a2e01173cd5b03c5aeb238e6afd1873e3"} Dec 04 18:15:45 crc kubenswrapper[4631]: I1204 18:15:45.194639 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5d6vz" event={"ID":"15b36d30-2c6d-4df7-ae40-1e6c440e7438","Type":"ContainerStarted","Data":"1eb6732cb392f1b33f69004d2ab7d4c5112ac06b93a2d11f020b4e37f932b6d2"} Dec 04 18:15:46 crc kubenswrapper[4631]: I1204 18:15:46.203887 4631 generic.go:334] "Generic (PLEG): container finished" podID="15b36d30-2c6d-4df7-ae40-1e6c440e7438" containerID="1eb6732cb392f1b33f69004d2ab7d4c5112ac06b93a2d11f020b4e37f932b6d2" exitCode=0 Dec 04 18:15:46 crc kubenswrapper[4631]: I1204 18:15:46.203930 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5d6vz" event={"ID":"15b36d30-2c6d-4df7-ae40-1e6c440e7438","Type":"ContainerDied","Data":"1eb6732cb392f1b33f69004d2ab7d4c5112ac06b93a2d11f020b4e37f932b6d2"} Dec 04 18:15:47 crc kubenswrapper[4631]: I1204 18:15:47.221106 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5d6vz" event={"ID":"15b36d30-2c6d-4df7-ae40-1e6c440e7438","Type":"ContainerStarted","Data":"07811d90c69c945d693d10b5964323333f2755c72eb50455a65519dcef24b055"} Dec 04 18:15:52 crc kubenswrapper[4631]: I1204 18:15:52.773656 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5d6vz" Dec 04 18:15:52 crc kubenswrapper[4631]: I1204 
18:15:52.774267 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5d6vz" Dec 04 18:15:52 crc kubenswrapper[4631]: I1204 18:15:52.826322 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5d6vz" Dec 04 18:15:52 crc kubenswrapper[4631]: I1204 18:15:52.862920 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5d6vz" podStartSLOduration=8.433882987 podStartE2EDuration="10.862896546s" podCreationTimestamp="2025-12-04 18:15:42 +0000 UTC" firstStartedPulling="2025-12-04 18:15:44.18273838 +0000 UTC m=+2874.214980378" lastFinishedPulling="2025-12-04 18:15:46.611751939 +0000 UTC m=+2876.643993937" observedRunningTime="2025-12-04 18:15:47.241128038 +0000 UTC m=+2877.273370036" watchObservedRunningTime="2025-12-04 18:15:52.862896546 +0000 UTC m=+2882.895138554" Dec 04 18:15:53 crc kubenswrapper[4631]: I1204 18:15:53.344251 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5d6vz" Dec 04 18:15:53 crc kubenswrapper[4631]: I1204 18:15:53.390309 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5d6vz"] Dec 04 18:15:55 crc kubenswrapper[4631]: I1204 18:15:55.313819 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5d6vz" podUID="15b36d30-2c6d-4df7-ae40-1e6c440e7438" containerName="registry-server" containerID="cri-o://07811d90c69c945d693d10b5964323333f2755c72eb50455a65519dcef24b055" gracePeriod=2 Dec 04 18:15:55 crc kubenswrapper[4631]: I1204 18:15:55.793962 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5d6vz" Dec 04 18:15:55 crc kubenswrapper[4631]: I1204 18:15:55.849041 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15b36d30-2c6d-4df7-ae40-1e6c440e7438-utilities\") pod \"15b36d30-2c6d-4df7-ae40-1e6c440e7438\" (UID: \"15b36d30-2c6d-4df7-ae40-1e6c440e7438\") " Dec 04 18:15:55 crc kubenswrapper[4631]: I1204 18:15:55.849792 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15b36d30-2c6d-4df7-ae40-1e6c440e7438-utilities" (OuterVolumeSpecName: "utilities") pod "15b36d30-2c6d-4df7-ae40-1e6c440e7438" (UID: "15b36d30-2c6d-4df7-ae40-1e6c440e7438"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:15:55 crc kubenswrapper[4631]: I1204 18:15:55.951177 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15b36d30-2c6d-4df7-ae40-1e6c440e7438-catalog-content\") pod \"15b36d30-2c6d-4df7-ae40-1e6c440e7438\" (UID: \"15b36d30-2c6d-4df7-ae40-1e6c440e7438\") " Dec 04 18:15:55 crc kubenswrapper[4631]: I1204 18:15:55.951219 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4lnlx\" (UniqueName: \"kubernetes.io/projected/15b36d30-2c6d-4df7-ae40-1e6c440e7438-kube-api-access-4lnlx\") pod \"15b36d30-2c6d-4df7-ae40-1e6c440e7438\" (UID: \"15b36d30-2c6d-4df7-ae40-1e6c440e7438\") " Dec 04 18:15:55 crc kubenswrapper[4631]: I1204 18:15:55.951617 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15b36d30-2c6d-4df7-ae40-1e6c440e7438-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 18:15:55 crc kubenswrapper[4631]: I1204 18:15:55.959232 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15b36d30-2c6d-4df7-ae40-1e6c440e7438-kube-api-access-4lnlx" (OuterVolumeSpecName: "kube-api-access-4lnlx") pod "15b36d30-2c6d-4df7-ae40-1e6c440e7438" (UID: "15b36d30-2c6d-4df7-ae40-1e6c440e7438"). InnerVolumeSpecName "kube-api-access-4lnlx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:15:55 crc kubenswrapper[4631]: I1204 18:15:55.976776 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15b36d30-2c6d-4df7-ae40-1e6c440e7438-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "15b36d30-2c6d-4df7-ae40-1e6c440e7438" (UID: "15b36d30-2c6d-4df7-ae40-1e6c440e7438"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:15:56 crc kubenswrapper[4631]: I1204 18:15:56.053389 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15b36d30-2c6d-4df7-ae40-1e6c440e7438-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 18:15:56 crc kubenswrapper[4631]: I1204 18:15:56.053420 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4lnlx\" (UniqueName: \"kubernetes.io/projected/15b36d30-2c6d-4df7-ae40-1e6c440e7438-kube-api-access-4lnlx\") on node \"crc\" DevicePath \"\"" Dec 04 18:15:56 crc kubenswrapper[4631]: I1204 18:15:56.322444 4631 generic.go:334] "Generic (PLEG): container finished" podID="15b36d30-2c6d-4df7-ae40-1e6c440e7438" containerID="07811d90c69c945d693d10b5964323333f2755c72eb50455a65519dcef24b055" exitCode=0 Dec 04 18:15:56 crc kubenswrapper[4631]: I1204 18:15:56.322483 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5d6vz" event={"ID":"15b36d30-2c6d-4df7-ae40-1e6c440e7438","Type":"ContainerDied","Data":"07811d90c69c945d693d10b5964323333f2755c72eb50455a65519dcef24b055"} Dec 04 18:15:56 crc kubenswrapper[4631]: I1204 18:15:56.322509 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5d6vz" event={"ID":"15b36d30-2c6d-4df7-ae40-1e6c440e7438","Type":"ContainerDied","Data":"1427f5f0d1d532c3da66efed60d5c58a2e01173cd5b03c5aeb238e6afd1873e3"} Dec 04 18:15:56 crc kubenswrapper[4631]: I1204 18:15:56.322525 4631 scope.go:117] "RemoveContainer" containerID="07811d90c69c945d693d10b5964323333f2755c72eb50455a65519dcef24b055" Dec 04 18:15:56 crc kubenswrapper[4631]: I1204 18:15:56.322654 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5d6vz" Dec 04 18:15:56 crc kubenswrapper[4631]: I1204 18:15:56.348233 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5d6vz"] Dec 04 18:15:56 crc kubenswrapper[4631]: I1204 18:15:56.356847 4631 scope.go:117] "RemoveContainer" containerID="1eb6732cb392f1b33f69004d2ab7d4c5112ac06b93a2d11f020b4e37f932b6d2" Dec 04 18:15:56 crc kubenswrapper[4631]: I1204 18:15:56.358955 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5d6vz"] Dec 04 18:15:56 crc kubenswrapper[4631]: I1204 18:15:56.377879 4631 scope.go:117] "RemoveContainer" containerID="3cdae97fc6a75a2c6d8cd2ec9f5502fad91f0d81b82bec3c51c8cce0004e8754" Dec 04 18:15:56 crc kubenswrapper[4631]: I1204 18:15:56.431184 4631 scope.go:117] "RemoveContainer" containerID="07811d90c69c945d693d10b5964323333f2755c72eb50455a65519dcef24b055" Dec 04 18:15:56 crc kubenswrapper[4631]: E1204 18:15:56.433074 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07811d90c69c945d693d10b5964323333f2755c72eb50455a65519dcef24b055\": container with ID starting with 07811d90c69c945d693d10b5964323333f2755c72eb50455a65519dcef24b055 not found: ID does not exist" containerID="07811d90c69c945d693d10b5964323333f2755c72eb50455a65519dcef24b055" Dec 04 18:15:56 crc kubenswrapper[4631]: I1204 18:15:56.433104 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07811d90c69c945d693d10b5964323333f2755c72eb50455a65519dcef24b055"} err="failed to get container status \"07811d90c69c945d693d10b5964323333f2755c72eb50455a65519dcef24b055\": rpc error: code = NotFound desc = could not find container \"07811d90c69c945d693d10b5964323333f2755c72eb50455a65519dcef24b055\": container with ID starting with 07811d90c69c945d693d10b5964323333f2755c72eb50455a65519dcef24b055 not found: ID does not exist" Dec 04 18:15:56 crc kubenswrapper[4631]: I1204 18:15:56.433127 4631 scope.go:117] "RemoveContainer" containerID="1eb6732cb392f1b33f69004d2ab7d4c5112ac06b93a2d11f020b4e37f932b6d2" Dec 04 18:15:56 crc kubenswrapper[4631]: E1204 18:15:56.433324 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1eb6732cb392f1b33f69004d2ab7d4c5112ac06b93a2d11f020b4e37f932b6d2\": container with ID starting with 1eb6732cb392f1b33f69004d2ab7d4c5112ac06b93a2d11f020b4e37f932b6d2 not found: ID does not exist" containerID="1eb6732cb392f1b33f69004d2ab7d4c5112ac06b93a2d11f020b4e37f932b6d2" Dec 04 18:15:56 crc kubenswrapper[4631]: I1204 18:15:56.433342 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1eb6732cb392f1b33f69004d2ab7d4c5112ac06b93a2d11f020b4e37f932b6d2"} err="failed to get container status \"1eb6732cb392f1b33f69004d2ab7d4c5112ac06b93a2d11f020b4e37f932b6d2\": rpc error: code = NotFound desc = could not find container \"1eb6732cb392f1b33f69004d2ab7d4c5112ac06b93a2d11f020b4e37f932b6d2\": container with ID starting with 1eb6732cb392f1b33f69004d2ab7d4c5112ac06b93a2d11f020b4e37f932b6d2 not found: ID does not exist" Dec 04 18:15:56 crc kubenswrapper[4631]: I1204 18:15:56.433365 4631 scope.go:117] "RemoveContainer" containerID="3cdae97fc6a75a2c6d8cd2ec9f5502fad91f0d81b82bec3c51c8cce0004e8754" Dec 04 18:15:56 crc kubenswrapper[4631]: E1204 18:15:56.433539 4631 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"3cdae97fc6a75a2c6d8cd2ec9f5502fad91f0d81b82bec3c51c8cce0004e8754\": container with ID starting with 3cdae97fc6a75a2c6d8cd2ec9f5502fad91f0d81b82bec3c51c8cce0004e8754 not found: ID does not exist" containerID="3cdae97fc6a75a2c6d8cd2ec9f5502fad91f0d81b82bec3c51c8cce0004e8754" Dec 04 18:15:56 crc kubenswrapper[4631]: I1204 18:15:56.433556 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cdae97fc6a75a2c6d8cd2ec9f5502fad91f0d81b82bec3c51c8cce0004e8754"} err="failed to get container status \"3cdae97fc6a75a2c6d8cd2ec9f5502fad91f0d81b82bec3c51c8cce0004e8754\": rpc error: code = NotFound desc = could not find container \"3cdae97fc6a75a2c6d8cd2ec9f5502fad91f0d81b82bec3c51c8cce0004e8754\": container with ID starting with 3cdae97fc6a75a2c6d8cd2ec9f5502fad91f0d81b82bec3c51c8cce0004e8754 not found: ID does not exist" Dec 04 18:15:58 crc kubenswrapper[4631]: I1204 18:15:58.254492 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15b36d30-2c6d-4df7-ae40-1e6c440e7438" path="/var/lib/kubelet/pods/15b36d30-2c6d-4df7-ae40-1e6c440e7438/volumes" Dec 04 18:16:06 crc kubenswrapper[4631]: I1204 18:16:06.023018 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:16:06 crc kubenswrapper[4631]: I1204 18:16:06.023616 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 18:16:31 crc kubenswrapper[4631]: I1204 18:16:31.641874 4631 generic.go:334] "Generic (PLEG): container finished" podID="93cd2870-edd3-4b7f-9868-6c437dcf3164" containerID="e49c15333fd564d25247cd14a1bcc83407b6fd5e30b56b83cf4aa808c1d41b18" exitCode=0 Dec 04 18:16:31 crc kubenswrapper[4631]: I1204 18:16:31.641962 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs" event={"ID":"93cd2870-edd3-4b7f-9868-6c437dcf3164","Type":"ContainerDied","Data":"e49c15333fd564d25247cd14a1bcc83407b6fd5e30b56b83cf4aa808c1d41b18"} Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.074089 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.124171 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-cell1-compute-config-0\") pod \"93cd2870-edd3-4b7f-9868-6c437dcf3164\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.124440 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-extra-config-0\") pod \"93cd2870-edd3-4b7f-9868-6c437dcf3164\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.124555 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-ssh-key\") pod \"93cd2870-edd3-4b7f-9868-6c437dcf3164\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.124635 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-migration-ssh-key-0\") pod \"93cd2870-edd3-4b7f-9868-6c437dcf3164\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.124719 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-cell1-compute-config-1\") pod \"93cd2870-edd3-4b7f-9868-6c437dcf3164\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.124923 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-migration-ssh-key-1\") pod \"93cd2870-edd3-4b7f-9868-6c437dcf3164\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.125068 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lpnfm\" (UniqueName: \"kubernetes.io/projected/93cd2870-edd3-4b7f-9868-6c437dcf3164-kube-api-access-lpnfm\") pod \"93cd2870-edd3-4b7f-9868-6c437dcf3164\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.125159 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-inventory\") pod \"93cd2870-edd3-4b7f-9868-6c437dcf3164\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.125231 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-combined-ca-bundle\") pod \"93cd2870-edd3-4b7f-9868-6c437dcf3164\" (UID: \"93cd2870-edd3-4b7f-9868-6c437dcf3164\") " Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.132520 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "93cd2870-edd3-4b7f-9868-6c437dcf3164" (UID: "93cd2870-edd3-4b7f-9868-6c437dcf3164"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.150794 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93cd2870-edd3-4b7f-9868-6c437dcf3164-kube-api-access-lpnfm" (OuterVolumeSpecName: "kube-api-access-lpnfm") pod "93cd2870-edd3-4b7f-9868-6c437dcf3164" (UID: "93cd2870-edd3-4b7f-9868-6c437dcf3164"). InnerVolumeSpecName "kube-api-access-lpnfm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.165896 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "93cd2870-edd3-4b7f-9868-6c437dcf3164" (UID: "93cd2870-edd3-4b7f-9868-6c437dcf3164"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.166505 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "93cd2870-edd3-4b7f-9868-6c437dcf3164" (UID: "93cd2870-edd3-4b7f-9868-6c437dcf3164"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.194462 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "93cd2870-edd3-4b7f-9868-6c437dcf3164" (UID: "93cd2870-edd3-4b7f-9868-6c437dcf3164"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.196592 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "93cd2870-edd3-4b7f-9868-6c437dcf3164" (UID: "93cd2870-edd3-4b7f-9868-6c437dcf3164"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.196736 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "93cd2870-edd3-4b7f-9868-6c437dcf3164" (UID: "93cd2870-edd3-4b7f-9868-6c437dcf3164"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.198798 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-inventory" (OuterVolumeSpecName: "inventory") pod "93cd2870-edd3-4b7f-9868-6c437dcf3164" (UID: "93cd2870-edd3-4b7f-9868-6c437dcf3164"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.218389 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "93cd2870-edd3-4b7f-9868-6c437dcf3164" (UID: "93cd2870-edd3-4b7f-9868-6c437dcf3164"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.227812 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lpnfm\" (UniqueName: \"kubernetes.io/projected/93cd2870-edd3-4b7f-9868-6c437dcf3164-kube-api-access-lpnfm\") on node \"crc\" DevicePath \"\"" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.227848 4631 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-inventory\") on node \"crc\" DevicePath \"\"" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.227857 4631 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.227865 4631 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.227877 4631 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.227885 4631 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.227893 4631 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.227902 4631 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.227910 4631 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/93cd2870-edd3-4b7f-9868-6c437dcf3164-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.664287 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs" event={"ID":"93cd2870-edd3-4b7f-9868-6c437dcf3164","Type":"ContainerDied","Data":"74b9eb10b728a974e624fcc612e362bb68c76f126f1687da9a3f56478b860ebe"} Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.664660 4631 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="74b9eb10b728a974e624fcc612e362bb68c76f126f1687da9a3f56478b860ebe" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.664427 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-lqlcs" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.860031 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6"] Dec 04 18:16:33 crc kubenswrapper[4631]: E1204 18:16:33.860703 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15b36d30-2c6d-4df7-ae40-1e6c440e7438" containerName="extract-content" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.860774 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="15b36d30-2c6d-4df7-ae40-1e6c440e7438" containerName="extract-content" Dec 04 18:16:33 crc kubenswrapper[4631]: E1204 18:16:33.860842 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15b36d30-2c6d-4df7-ae40-1e6c440e7438" containerName="registry-server" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.860912 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="15b36d30-2c6d-4df7-ae40-1e6c440e7438" containerName="registry-server" Dec 04 18:16:33 crc kubenswrapper[4631]: E1204 18:16:33.860994 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93cd2870-edd3-4b7f-9868-6c437dcf3164" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.861061 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="93cd2870-edd3-4b7f-9868-6c437dcf3164" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 04 18:16:33 crc kubenswrapper[4631]: E1204 18:16:33.861128 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15b36d30-2c6d-4df7-ae40-1e6c440e7438" containerName="extract-utilities" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.861178 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="15b36d30-2c6d-4df7-ae40-1e6c440e7438" containerName="extract-utilities" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.861448 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="93cd2870-edd3-4b7f-9868-6c437dcf3164" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.861534 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="15b36d30-2c6d-4df7-ae40-1e6c440e7438" containerName="registry-server" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.862255 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.864064 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.864651 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.866630 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.866887 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.867700 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-9284p" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.869986 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6"] Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.940038 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.940097 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.940682 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.940811 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.940858 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 
18:16:33.941011 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5c8lg\" (UniqueName: \"kubernetes.io/projected/15251242-87d0-444d-aa7f-f0b8936efd96-kube-api-access-5c8lg\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:33 crc kubenswrapper[4631]: I1204 18:16:33.941069 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:34 crc kubenswrapper[4631]: I1204 18:16:34.042591 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:34 crc kubenswrapper[4631]: I1204 18:16:34.042659 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:34 crc kubenswrapper[4631]: I1204 18:16:34.042684 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:34 crc kubenswrapper[4631]: I1204 18:16:34.042744 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:34 crc kubenswrapper[4631]: I1204 18:16:34.042782 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:34 crc kubenswrapper[4631]: I1204 18:16:34.042803 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:34 crc kubenswrapper[4631]: I1204 18:16:34.042852 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5c8lg\" (UniqueName: \"kubernetes.io/projected/15251242-87d0-444d-aa7f-f0b8936efd96-kube-api-access-5c8lg\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:34 crc kubenswrapper[4631]: I1204 18:16:34.046853 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:34 crc kubenswrapper[4631]: I1204 18:16:34.047470 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:34 crc kubenswrapper[4631]: I1204 18:16:34.047475 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:34 crc kubenswrapper[4631]: I1204 18:16:34.048219 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:34 crc kubenswrapper[4631]: I1204 18:16:34.049764 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:34 crc kubenswrapper[4631]: I1204 18:16:34.051055 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:34 crc kubenswrapper[4631]: I1204 18:16:34.060726 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5c8lg\" (UniqueName: \"kubernetes.io/projected/15251242-87d0-444d-aa7f-f0b8936efd96-kube-api-access-5c8lg\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:34 crc kubenswrapper[4631]: I1204 18:16:34.180623 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" Dec 04 18:16:34 crc kubenswrapper[4631]: I1204 18:16:34.782264 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6"] Dec 04 18:16:34 crc kubenswrapper[4631]: W1204 18:16:34.782911 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod15251242_87d0_444d_aa7f_f0b8936efd96.slice/crio-1ba272de113b61b2785f05a267d1badb0944205df6498b68efdfa51e4f4798e3 WatchSource:0}: Error finding container 1ba272de113b61b2785f05a267d1badb0944205df6498b68efdfa51e4f4798e3: Status 404 returned error can't find the container with id 1ba272de113b61b2785f05a267d1badb0944205df6498b68efdfa51e4f4798e3 Dec 04 18:16:35 crc kubenswrapper[4631]: I1204 18:16:35.688397 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" event={"ID":"15251242-87d0-444d-aa7f-f0b8936efd96","Type":"ContainerStarted","Data":"eb479ef7127519123c59adc5fe0cdc48e0b087f865a406d7bbe052c7d311ebbd"} Dec 04 18:16:35 crc kubenswrapper[4631]: I1204 18:16:35.688786 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" event={"ID":"15251242-87d0-444d-aa7f-f0b8936efd96","Type":"ContainerStarted","Data":"1ba272de113b61b2785f05a267d1badb0944205df6498b68efdfa51e4f4798e3"} Dec 04 18:16:36 crc kubenswrapper[4631]: I1204 18:16:36.024015 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:16:36 crc kubenswrapper[4631]: I1204 18:16:36.024088 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 18:16:36 crc kubenswrapper[4631]: I1204 18:16:36.024144 4631 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 18:16:36 crc kubenswrapper[4631]: I1204 18:16:36.025512 4631 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1"} pod="openshift-machine-config-operator/machine-config-daemon-q27wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 18:16:36 crc kubenswrapper[4631]: I1204 18:16:36.025579 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" containerID="cri-o://5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1" gracePeriod=600 Dec 04 18:16:36 crc kubenswrapper[4631]: E1204 18:16:36.162901 4631 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:16:36 crc kubenswrapper[4631]: I1204 18:16:36.699411 4631 generic.go:334] "Generic (PLEG): container finished" podID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1" exitCode=0 Dec 04 18:16:36 crc kubenswrapper[4631]: I1204 18:16:36.699470 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerDied","Data":"5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1"} Dec 04 18:16:36 crc kubenswrapper[4631]: I1204 18:16:36.699520 4631 scope.go:117] "RemoveContainer" containerID="6cf6e79e6d18aca329f54327d5530379d0ca975dbed57eaeda5328e285791625" Dec 04 18:16:36 crc kubenswrapper[4631]: I1204 18:16:36.700135 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1" Dec 04 18:16:36 crc kubenswrapper[4631]: E1204 18:16:36.700466 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:16:36 crc kubenswrapper[4631]: I1204 18:16:36.728061 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" podStartSLOduration=3.540619027 podStartE2EDuration="3.728037848s" podCreationTimestamp="2025-12-04 18:16:33 +0000 UTC" firstStartedPulling="2025-12-04 18:16:34.787662206 +0000 UTC m=+2924.819904204" lastFinishedPulling="2025-12-04 18:16:34.975081017 +0000 UTC m=+2925.007323025" observedRunningTime="2025-12-04 18:16:35.717204384 +0000 UTC m=+2925.749446392" watchObservedRunningTime="2025-12-04 18:16:36.728037848 +0000 UTC m=+2926.760279846" Dec 04 18:16:49 crc kubenswrapper[4631]: I1204 18:16:49.239389 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1" Dec 04 18:16:49 crc kubenswrapper[4631]: E1204 18:16:49.241261 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:16:54 crc kubenswrapper[4631]: I1204 18:16:54.728929 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-grxgg"] Dec 04 18:16:54 crc kubenswrapper[4631]: I1204 18:16:54.731318 4631 util.go:30] "No sandbox for pod can be found. 
Dec 04 18:16:54 crc kubenswrapper[4631]: I1204 18:16:54.746276 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-grxgg"]
Dec 04 18:16:54 crc kubenswrapper[4631]: I1204 18:16:54.848528 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0f96ead-5835-4800-9a1b-be666dced91f-catalog-content\") pod \"community-operators-grxgg\" (UID: \"b0f96ead-5835-4800-9a1b-be666dced91f\") " pod="openshift-marketplace/community-operators-grxgg"
Dec 04 18:16:54 crc kubenswrapper[4631]: I1204 18:16:54.848655 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhzrb\" (UniqueName: \"kubernetes.io/projected/b0f96ead-5835-4800-9a1b-be666dced91f-kube-api-access-hhzrb\") pod \"community-operators-grxgg\" (UID: \"b0f96ead-5835-4800-9a1b-be666dced91f\") " pod="openshift-marketplace/community-operators-grxgg"
Dec 04 18:16:54 crc kubenswrapper[4631]: I1204 18:16:54.848679 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0f96ead-5835-4800-9a1b-be666dced91f-utilities\") pod \"community-operators-grxgg\" (UID: \"b0f96ead-5835-4800-9a1b-be666dced91f\") " pod="openshift-marketplace/community-operators-grxgg"
Dec 04 18:16:54 crc kubenswrapper[4631]: I1204 18:16:54.950837 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0f96ead-5835-4800-9a1b-be666dced91f-catalog-content\") pod \"community-operators-grxgg\" (UID: \"b0f96ead-5835-4800-9a1b-be666dced91f\") " pod="openshift-marketplace/community-operators-grxgg"
Dec 04 18:16:54 crc kubenswrapper[4631]: I1204 18:16:54.950935 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhzrb\" (UniqueName: \"kubernetes.io/projected/b0f96ead-5835-4800-9a1b-be666dced91f-kube-api-access-hhzrb\") pod \"community-operators-grxgg\" (UID: \"b0f96ead-5835-4800-9a1b-be666dced91f\") " pod="openshift-marketplace/community-operators-grxgg"
Dec 04 18:16:54 crc kubenswrapper[4631]: I1204 18:16:54.950962 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0f96ead-5835-4800-9a1b-be666dced91f-utilities\") pod \"community-operators-grxgg\" (UID: \"b0f96ead-5835-4800-9a1b-be666dced91f\") " pod="openshift-marketplace/community-operators-grxgg"
Dec 04 18:16:54 crc kubenswrapper[4631]: I1204 18:16:54.951436 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0f96ead-5835-4800-9a1b-be666dced91f-catalog-content\") pod \"community-operators-grxgg\" (UID: \"b0f96ead-5835-4800-9a1b-be666dced91f\") " pod="openshift-marketplace/community-operators-grxgg"
Dec 04 18:16:54 crc kubenswrapper[4631]: I1204 18:16:54.951436 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0f96ead-5835-4800-9a1b-be666dced91f-utilities\") pod \"community-operators-grxgg\" (UID: \"b0f96ead-5835-4800-9a1b-be666dced91f\") " pod="openshift-marketplace/community-operators-grxgg"
Dec 04 18:16:54 crc kubenswrapper[4631]: I1204 18:16:54.978708 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhzrb\" (UniqueName: \"kubernetes.io/projected/b0f96ead-5835-4800-9a1b-be666dced91f-kube-api-access-hhzrb\") pod \"community-operators-grxgg\" (UID: \"b0f96ead-5835-4800-9a1b-be666dced91f\") " pod="openshift-marketplace/community-operators-grxgg"
Dec 04 18:16:55 crc kubenswrapper[4631]: I1204 18:16:55.070868 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-grxgg"
Dec 04 18:16:55 crc kubenswrapper[4631]: W1204 18:16:55.630450 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb0f96ead_5835_4800_9a1b_be666dced91f.slice/crio-c8e5a23fc8a5d73247fcde533dae76a7aa5a7fd3744358112c2b3a55d0dbc52b WatchSource:0}: Error finding container c8e5a23fc8a5d73247fcde533dae76a7aa5a7fd3744358112c2b3a55d0dbc52b: Status 404 returned error can't find the container with id c8e5a23fc8a5d73247fcde533dae76a7aa5a7fd3744358112c2b3a55d0dbc52b
Dec 04 18:16:55 crc kubenswrapper[4631]: I1204 18:16:55.633305 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-grxgg"]
Dec 04 18:16:55 crc kubenswrapper[4631]: I1204 18:16:55.873780 4631 generic.go:334] "Generic (PLEG): container finished" podID="b0f96ead-5835-4800-9a1b-be666dced91f" containerID="c2f126c88e40cb4236dfbd94f918ca3dc4c5dd4e4b315dffdf1ab6b1677cc5fc" exitCode=0
Dec 04 18:16:55 crc kubenswrapper[4631]: I1204 18:16:55.873880 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grxgg" event={"ID":"b0f96ead-5835-4800-9a1b-be666dced91f","Type":"ContainerDied","Data":"c2f126c88e40cb4236dfbd94f918ca3dc4c5dd4e4b315dffdf1ab6b1677cc5fc"}
Dec 04 18:16:55 crc kubenswrapper[4631]: I1204 18:16:55.874084 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grxgg" event={"ID":"b0f96ead-5835-4800-9a1b-be666dced91f","Type":"ContainerStarted","Data":"c8e5a23fc8a5d73247fcde533dae76a7aa5a7fd3744358112c2b3a55d0dbc52b"}
Dec 04 18:16:56 crc kubenswrapper[4631]: I1204 18:16:56.887029 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grxgg" event={"ID":"b0f96ead-5835-4800-9a1b-be666dced91f","Type":"ContainerStarted","Data":"3959347c08b74f5ea3bb3e9a1f9b9d7d937472269bb72c786a709e3e4d6b7466"}
Dec 04 18:16:57 crc kubenswrapper[4631]: I1204 18:16:57.901501 4631 generic.go:334] "Generic (PLEG): container finished" podID="b0f96ead-5835-4800-9a1b-be666dced91f" containerID="3959347c08b74f5ea3bb3e9a1f9b9d7d937472269bb72c786a709e3e4d6b7466" exitCode=0
Dec 04 18:16:57 crc kubenswrapper[4631]: I1204 18:16:57.901583 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grxgg" event={"ID":"b0f96ead-5835-4800-9a1b-be666dced91f","Type":"ContainerDied","Data":"3959347c08b74f5ea3bb3e9a1f9b9d7d937472269bb72c786a709e3e4d6b7466"}
Dec 04 18:16:58 crc kubenswrapper[4631]: I1204 18:16:58.919839 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grxgg" event={"ID":"b0f96ead-5835-4800-9a1b-be666dced91f","Type":"ContainerStarted","Data":"c45a34d85a03b7220c3ca5968270e7462adcddba7fe60cf89dcf97413a11a80e"}
Dec 04 18:16:58 crc kubenswrapper[4631]: I1204 18:16:58.954622 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-grxgg" podStartSLOduration=2.436826341 podStartE2EDuration="4.954600829s" podCreationTimestamp="2025-12-04 18:16:54 +0000 UTC" firstStartedPulling="2025-12-04 18:16:55.875486894 +0000 UTC m=+2945.907728892" lastFinishedPulling="2025-12-04 18:16:58.393261382 +0000 UTC m=+2948.425503380" observedRunningTime="2025-12-04 18:16:58.949013669 +0000 UTC m=+2948.981255667" watchObservedRunningTime="2025-12-04 18:16:58.954600829 +0000 UTC m=+2948.986842827"
Dec 04 18:17:01 crc kubenswrapper[4631]: I1204 18:17:01.239524 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1"
Dec 04 18:17:01 crc kubenswrapper[4631]: E1204 18:17:01.240001 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 18:17:05 crc kubenswrapper[4631]: I1204 18:17:05.071786 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-grxgg"
Dec 04 18:17:05 crc kubenswrapper[4631]: I1204 18:17:05.072704 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-grxgg"
Dec 04 18:17:05 crc kubenswrapper[4631]: I1204 18:17:05.119079 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-grxgg"
Dec 04 18:17:06 crc kubenswrapper[4631]: I1204 18:17:06.060777 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-grxgg"
Dec 04 18:17:06 crc kubenswrapper[4631]: I1204 18:17:06.114182 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-grxgg"]
Dec 04 18:17:07 crc kubenswrapper[4631]: I1204 18:17:07.998099 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-grxgg" podUID="b0f96ead-5835-4800-9a1b-be666dced91f" containerName="registry-server" containerID="cri-o://c45a34d85a03b7220c3ca5968270e7462adcddba7fe60cf89dcf97413a11a80e" gracePeriod=2
Dec 04 18:17:08 crc kubenswrapper[4631]: I1204 18:17:08.505453 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-grxgg"
Dec 04 18:17:08 crc kubenswrapper[4631]: I1204 18:17:08.608553 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0f96ead-5835-4800-9a1b-be666dced91f-utilities\") pod \"b0f96ead-5835-4800-9a1b-be666dced91f\" (UID: \"b0f96ead-5835-4800-9a1b-be666dced91f\") "
Dec 04 18:17:08 crc kubenswrapper[4631]: I1204 18:17:08.608643 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0f96ead-5835-4800-9a1b-be666dced91f-catalog-content\") pod \"b0f96ead-5835-4800-9a1b-be666dced91f\" (UID: \"b0f96ead-5835-4800-9a1b-be666dced91f\") "
Dec 04 18:17:08 crc kubenswrapper[4631]: I1204 18:17:08.608689 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hhzrb\" (UniqueName: \"kubernetes.io/projected/b0f96ead-5835-4800-9a1b-be666dced91f-kube-api-access-hhzrb\") pod \"b0f96ead-5835-4800-9a1b-be666dced91f\" (UID: \"b0f96ead-5835-4800-9a1b-be666dced91f\") "
Dec 04 18:17:08 crc kubenswrapper[4631]: I1204 18:17:08.609592 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0f96ead-5835-4800-9a1b-be666dced91f-utilities" (OuterVolumeSpecName: "utilities") pod "b0f96ead-5835-4800-9a1b-be666dced91f" (UID: "b0f96ead-5835-4800-9a1b-be666dced91f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 18:17:08 crc kubenswrapper[4631]: I1204 18:17:08.610320 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0f96ead-5835-4800-9a1b-be666dced91f-utilities\") on node \"crc\" DevicePath \"\""
Dec 04 18:17:08 crc kubenswrapper[4631]: I1204 18:17:08.619564 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0f96ead-5835-4800-9a1b-be666dced91f-kube-api-access-hhzrb" (OuterVolumeSpecName: "kube-api-access-hhzrb") pod "b0f96ead-5835-4800-9a1b-be666dced91f" (UID: "b0f96ead-5835-4800-9a1b-be666dced91f"). InnerVolumeSpecName "kube-api-access-hhzrb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 18:17:08 crc kubenswrapper[4631]: I1204 18:17:08.683328 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0f96ead-5835-4800-9a1b-be666dced91f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b0f96ead-5835-4800-9a1b-be666dced91f" (UID: "b0f96ead-5835-4800-9a1b-be666dced91f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 18:17:08 crc kubenswrapper[4631]: I1204 18:17:08.713464 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0f96ead-5835-4800-9a1b-be666dced91f-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 04 18:17:08 crc kubenswrapper[4631]: I1204 18:17:08.713687 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hhzrb\" (UniqueName: \"kubernetes.io/projected/b0f96ead-5835-4800-9a1b-be666dced91f-kube-api-access-hhzrb\") on node \"crc\" DevicePath \"\""
Dec 04 18:17:09 crc kubenswrapper[4631]: I1204 18:17:09.007487 4631 generic.go:334] "Generic (PLEG): container finished" podID="b0f96ead-5835-4800-9a1b-be666dced91f" containerID="c45a34d85a03b7220c3ca5968270e7462adcddba7fe60cf89dcf97413a11a80e" exitCode=0
Dec 04 18:17:09 crc kubenswrapper[4631]: I1204 18:17:09.007564 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-grxgg"
Dec 04 18:17:09 crc kubenswrapper[4631]: I1204 18:17:09.007570 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grxgg" event={"ID":"b0f96ead-5835-4800-9a1b-be666dced91f","Type":"ContainerDied","Data":"c45a34d85a03b7220c3ca5968270e7462adcddba7fe60cf89dcf97413a11a80e"}
Dec 04 18:17:09 crc kubenswrapper[4631]: I1204 18:17:09.007858 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grxgg" event={"ID":"b0f96ead-5835-4800-9a1b-be666dced91f","Type":"ContainerDied","Data":"c8e5a23fc8a5d73247fcde533dae76a7aa5a7fd3744358112c2b3a55d0dbc52b"}
Dec 04 18:17:09 crc kubenswrapper[4631]: I1204 18:17:09.007879 4631 scope.go:117] "RemoveContainer" containerID="c45a34d85a03b7220c3ca5968270e7462adcddba7fe60cf89dcf97413a11a80e"
Dec 04 18:17:09 crc kubenswrapper[4631]: I1204 18:17:09.031720 4631 scope.go:117] "RemoveContainer" containerID="3959347c08b74f5ea3bb3e9a1f9b9d7d937472269bb72c786a709e3e4d6b7466"
Dec 04 18:17:09 crc kubenswrapper[4631]: I1204 18:17:09.058434 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-grxgg"]
Dec 04 18:17:09 crc kubenswrapper[4631]: I1204 18:17:09.066753 4631 scope.go:117] "RemoveContainer" containerID="c2f126c88e40cb4236dfbd94f918ca3dc4c5dd4e4b315dffdf1ab6b1677cc5fc"
Dec 04 18:17:09 crc kubenswrapper[4631]: I1204 18:17:09.085946 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-grxgg"]
Dec 04 18:17:09 crc kubenswrapper[4631]: I1204 18:17:09.109687 4631 scope.go:117] "RemoveContainer" containerID="c45a34d85a03b7220c3ca5968270e7462adcddba7fe60cf89dcf97413a11a80e"
Dec 04 18:17:09 crc kubenswrapper[4631]: E1204 18:17:09.110163 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c45a34d85a03b7220c3ca5968270e7462adcddba7fe60cf89dcf97413a11a80e\": container with ID starting with c45a34d85a03b7220c3ca5968270e7462adcddba7fe60cf89dcf97413a11a80e not found: ID does not exist" containerID="c45a34d85a03b7220c3ca5968270e7462adcddba7fe60cf89dcf97413a11a80e"
Dec 04 18:17:09 crc kubenswrapper[4631]: I1204 18:17:09.110254 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c45a34d85a03b7220c3ca5968270e7462adcddba7fe60cf89dcf97413a11a80e"} err="failed to get container status \"c45a34d85a03b7220c3ca5968270e7462adcddba7fe60cf89dcf97413a11a80e\": rpc error: code = NotFound desc = could not find container \"c45a34d85a03b7220c3ca5968270e7462adcddba7fe60cf89dcf97413a11a80e\": container with ID starting with c45a34d85a03b7220c3ca5968270e7462adcddba7fe60cf89dcf97413a11a80e not found: ID does not exist"
Dec 04 18:17:09 crc kubenswrapper[4631]: I1204 18:17:09.110287 4631 scope.go:117] "RemoveContainer" containerID="3959347c08b74f5ea3bb3e9a1f9b9d7d937472269bb72c786a709e3e4d6b7466"
Dec 04 18:17:09 crc kubenswrapper[4631]: E1204 18:17:09.110729 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3959347c08b74f5ea3bb3e9a1f9b9d7d937472269bb72c786a709e3e4d6b7466\": container with ID starting with 3959347c08b74f5ea3bb3e9a1f9b9d7d937472269bb72c786a709e3e4d6b7466 not found: ID does not exist" containerID="3959347c08b74f5ea3bb3e9a1f9b9d7d937472269bb72c786a709e3e4d6b7466"
Dec 04 18:17:09 crc kubenswrapper[4631]: I1204 18:17:09.110765 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3959347c08b74f5ea3bb3e9a1f9b9d7d937472269bb72c786a709e3e4d6b7466"} err="failed to get container status \"3959347c08b74f5ea3bb3e9a1f9b9d7d937472269bb72c786a709e3e4d6b7466\": rpc error: code = NotFound desc = could not find container \"3959347c08b74f5ea3bb3e9a1f9b9d7d937472269bb72c786a709e3e4d6b7466\": container with ID starting with 3959347c08b74f5ea3bb3e9a1f9b9d7d937472269bb72c786a709e3e4d6b7466 not found: ID does not exist"
Dec 04 18:17:09 crc kubenswrapper[4631]: I1204 18:17:09.110784 4631 scope.go:117] "RemoveContainer" containerID="c2f126c88e40cb4236dfbd94f918ca3dc4c5dd4e4b315dffdf1ab6b1677cc5fc"
Dec 04 18:17:09 crc kubenswrapper[4631]: E1204 18:17:09.112712 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2f126c88e40cb4236dfbd94f918ca3dc4c5dd4e4b315dffdf1ab6b1677cc5fc\": container with ID starting with c2f126c88e40cb4236dfbd94f918ca3dc4c5dd4e4b315dffdf1ab6b1677cc5fc not found: ID does not exist" containerID="c2f126c88e40cb4236dfbd94f918ca3dc4c5dd4e4b315dffdf1ab6b1677cc5fc"
Dec 04 18:17:09 crc kubenswrapper[4631]: I1204 18:17:09.112740 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2f126c88e40cb4236dfbd94f918ca3dc4c5dd4e4b315dffdf1ab6b1677cc5fc"} err="failed to get container status \"c2f126c88e40cb4236dfbd94f918ca3dc4c5dd4e4b315dffdf1ab6b1677cc5fc\": rpc error: code = NotFound desc = could not find container \"c2f126c88e40cb4236dfbd94f918ca3dc4c5dd4e4b315dffdf1ab6b1677cc5fc\": container with ID starting with c2f126c88e40cb4236dfbd94f918ca3dc4c5dd4e4b315dffdf1ab6b1677cc5fc not found: ID does not exist"
Dec 04 18:17:10 crc kubenswrapper[4631]: I1204 18:17:10.250257 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0f96ead-5835-4800-9a1b-be666dced91f" path="/var/lib/kubelet/pods/b0f96ead-5835-4800-9a1b-be666dced91f/volumes"
Dec 04 18:17:13 crc kubenswrapper[4631]: I1204 18:17:13.240275 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1"
Dec 04 18:17:13 crc kubenswrapper[4631]: E1204 18:17:13.240935 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 18:17:26 crc kubenswrapper[4631]: I1204 18:17:26.240193 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1"
Dec 04 18:17:26 crc kubenswrapper[4631]: E1204 18:17:26.240985 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 18:17:38 crc kubenswrapper[4631]: I1204 18:17:38.239856 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1"
Dec 04 18:17:38 crc kubenswrapper[4631]: E1204 18:17:38.241630 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 18:17:53 crc kubenswrapper[4631]: I1204 18:17:53.240572 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1"
Dec 04 18:17:53 crc kubenswrapper[4631]: E1204 18:17:53.242693 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 18:18:05 crc kubenswrapper[4631]: I1204 18:18:05.239030 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1"
Dec 04 18:18:05 crc kubenswrapper[4631]: E1204 18:18:05.239983 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 18:18:19 crc kubenswrapper[4631]: I1204 18:18:19.239600 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1"
Dec 04 18:18:19 crc kubenswrapper[4631]: E1204 18:18:19.240607 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 18:18:30 crc kubenswrapper[4631]: I1204 18:18:30.248081 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1"
Dec 04 18:18:30 crc kubenswrapper[4631]: E1204 18:18:30.248868 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 18:18:45 crc kubenswrapper[4631]: I1204 18:18:45.240444 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1"
Dec 04 18:18:45 crc kubenswrapper[4631]: E1204 18:18:45.241159 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 18:18:56 crc kubenswrapper[4631]: I1204 18:18:56.240497 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1"
Dec 04 18:18:56 crc kubenswrapper[4631]: E1204 18:18:56.241303 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 18:19:07 crc kubenswrapper[4631]: I1204 18:19:07.239674 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1"
Dec 04 18:19:07 crc kubenswrapper[4631]: E1204 18:19:07.240312 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 18:19:22 crc kubenswrapper[4631]: I1204 18:19:22.240434 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1"
Dec 04 18:19:22 crc kubenswrapper[4631]: E1204 18:19:22.241033 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 18:19:34 crc kubenswrapper[4631]: I1204 18:19:34.580836 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-n7bcl"]
Dec 04 18:19:34 crc kubenswrapper[4631]: E1204 18:19:34.581847 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0f96ead-5835-4800-9a1b-be666dced91f" containerName="extract-utilities"
Dec 04 18:19:34 crc kubenswrapper[4631]: I1204 18:19:34.581862 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0f96ead-5835-4800-9a1b-be666dced91f" containerName="extract-utilities"
Dec 04 18:19:34 crc kubenswrapper[4631]: E1204 18:19:34.581877 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0f96ead-5835-4800-9a1b-be666dced91f" containerName="extract-content"
Dec 04 18:19:34 crc kubenswrapper[4631]: I1204 18:19:34.581883 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0f96ead-5835-4800-9a1b-be666dced91f" containerName="extract-content"
Dec 04 18:19:34 crc kubenswrapper[4631]: E1204 18:19:34.581912 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0f96ead-5835-4800-9a1b-be666dced91f" containerName="registry-server"
Dec 04 18:19:34 crc kubenswrapper[4631]: I1204 18:19:34.581918 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0f96ead-5835-4800-9a1b-be666dced91f" containerName="registry-server"
Dec 04 18:19:34 crc kubenswrapper[4631]: I1204 18:19:34.582093 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0f96ead-5835-4800-9a1b-be666dced91f" containerName="registry-server"
Dec 04 18:19:34 crc kubenswrapper[4631]: I1204 18:19:34.583324 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n7bcl"
Dec 04 18:19:34 crc kubenswrapper[4631]: I1204 18:19:34.607426 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n7bcl"]
Dec 04 18:19:34 crc kubenswrapper[4631]: I1204 18:19:34.714058 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a96fa3f1-bfdc-4152-9276-18bf0cadd7d6-utilities\") pod \"redhat-operators-n7bcl\" (UID: \"a96fa3f1-bfdc-4152-9276-18bf0cadd7d6\") " pod="openshift-marketplace/redhat-operators-n7bcl"
Dec 04 18:19:34 crc kubenswrapper[4631]: I1204 18:19:34.714126 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpbgx\" (UniqueName: \"kubernetes.io/projected/a96fa3f1-bfdc-4152-9276-18bf0cadd7d6-kube-api-access-vpbgx\") pod \"redhat-operators-n7bcl\" (UID: \"a96fa3f1-bfdc-4152-9276-18bf0cadd7d6\") " pod="openshift-marketplace/redhat-operators-n7bcl"
Dec 04 18:19:34 crc kubenswrapper[4631]: I1204 18:19:34.715213 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a96fa3f1-bfdc-4152-9276-18bf0cadd7d6-catalog-content\") pod \"redhat-operators-n7bcl\" (UID: \"a96fa3f1-bfdc-4152-9276-18bf0cadd7d6\") " pod="openshift-marketplace/redhat-operators-n7bcl"
Dec 04 18:19:34 crc kubenswrapper[4631]: I1204 18:19:34.816463 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpbgx\" (UniqueName: \"kubernetes.io/projected/a96fa3f1-bfdc-4152-9276-18bf0cadd7d6-kube-api-access-vpbgx\") pod \"redhat-operators-n7bcl\" (UID: \"a96fa3f1-bfdc-4152-9276-18bf0cadd7d6\") " pod="openshift-marketplace/redhat-operators-n7bcl"
Dec 04 18:19:34 crc kubenswrapper[4631]: I1204 18:19:34.816805 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a96fa3f1-bfdc-4152-9276-18bf0cadd7d6-catalog-content\") pod \"redhat-operators-n7bcl\" (UID: \"a96fa3f1-bfdc-4152-9276-18bf0cadd7d6\") " pod="openshift-marketplace/redhat-operators-n7bcl"
Dec 04 18:19:34 crc kubenswrapper[4631]: I1204 18:19:34.816948 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a96fa3f1-bfdc-4152-9276-18bf0cadd7d6-utilities\") pod \"redhat-operators-n7bcl\" (UID: \"a96fa3f1-bfdc-4152-9276-18bf0cadd7d6\") " pod="openshift-marketplace/redhat-operators-n7bcl"
Dec 04 18:19:34 crc kubenswrapper[4631]: I1204 18:19:34.817363 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a96fa3f1-bfdc-4152-9276-18bf0cadd7d6-catalog-content\") pod \"redhat-operators-n7bcl\" (UID: \"a96fa3f1-bfdc-4152-9276-18bf0cadd7d6\") " pod="openshift-marketplace/redhat-operators-n7bcl"
Dec 04 18:19:34 crc kubenswrapper[4631]: I1204 18:19:34.817441 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a96fa3f1-bfdc-4152-9276-18bf0cadd7d6-utilities\") pod \"redhat-operators-n7bcl\" (UID: \"a96fa3f1-bfdc-4152-9276-18bf0cadd7d6\") " pod="openshift-marketplace/redhat-operators-n7bcl"
Dec 04 18:19:34 crc kubenswrapper[4631]: I1204 18:19:34.837987 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpbgx\" (UniqueName: \"kubernetes.io/projected/a96fa3f1-bfdc-4152-9276-18bf0cadd7d6-kube-api-access-vpbgx\") pod \"redhat-operators-n7bcl\" (UID: \"a96fa3f1-bfdc-4152-9276-18bf0cadd7d6\") " pod="openshift-marketplace/redhat-operators-n7bcl"
Dec 04 18:19:34 crc kubenswrapper[4631]: I1204 18:19:34.908442 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n7bcl"
Dec 04 18:19:35 crc kubenswrapper[4631]: I1204 18:19:35.458250 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n7bcl"]
Dec 04 18:19:36 crc kubenswrapper[4631]: I1204 18:19:36.239776 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1"
Dec 04 18:19:36 crc kubenswrapper[4631]: E1204 18:19:36.240287 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 18:19:36 crc kubenswrapper[4631]: I1204 18:19:36.372134 4631 generic.go:334] "Generic (PLEG): container finished" podID="a96fa3f1-bfdc-4152-9276-18bf0cadd7d6" containerID="2512a79310e3f04dbea53bdd80c66bcc870a4e6258d908ccf2b408d05e5d7e7b" exitCode=0
Dec 04 18:19:36 crc kubenswrapper[4631]: I1204 18:19:36.372187 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n7bcl" event={"ID":"a96fa3f1-bfdc-4152-9276-18bf0cadd7d6","Type":"ContainerDied","Data":"2512a79310e3f04dbea53bdd80c66bcc870a4e6258d908ccf2b408d05e5d7e7b"}
Dec 04 18:19:36 crc kubenswrapper[4631]: I1204 18:19:36.372215 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n7bcl" event={"ID":"a96fa3f1-bfdc-4152-9276-18bf0cadd7d6","Type":"ContainerStarted","Data":"673f0c961c2073bdb2841937ec69b1e16c656a1705196def48111238d60b715c"}
Dec 04 18:19:36 crc kubenswrapper[4631]: I1204 18:19:36.373903 4631 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 04 18:19:38 crc kubenswrapper[4631]: I1204 18:19:38.390780 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n7bcl" event={"ID":"a96fa3f1-bfdc-4152-9276-18bf0cadd7d6","Type":"ContainerStarted","Data":"ed5cd49ecc326451676a3634a44d0a799243cbcb20c834be3bbb2d9a796b788e"}
Dec 04 18:19:41 crc kubenswrapper[4631]: I1204 18:19:41.414961 4631 generic.go:334] "Generic (PLEG): container finished" podID="a96fa3f1-bfdc-4152-9276-18bf0cadd7d6" containerID="ed5cd49ecc326451676a3634a44d0a799243cbcb20c834be3bbb2d9a796b788e" exitCode=0
Dec 04 18:19:41 crc kubenswrapper[4631]: I1204 18:19:41.415036 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n7bcl" event={"ID":"a96fa3f1-bfdc-4152-9276-18bf0cadd7d6","Type":"ContainerDied","Data":"ed5cd49ecc326451676a3634a44d0a799243cbcb20c834be3bbb2d9a796b788e"}
Dec 04 18:19:42 crc kubenswrapper[4631]: I1204 18:19:42.428022 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n7bcl" event={"ID":"a96fa3f1-bfdc-4152-9276-18bf0cadd7d6","Type":"ContainerStarted","Data":"0bd215e17170ad8574864ac8d82f7c54d21c1b4e99ffa91cf269a3559f48a344"}
Dec 04 18:19:42 crc kubenswrapper[4631]: I1204 18:19:42.472862 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-n7bcl" podStartSLOduration=2.834497981 podStartE2EDuration="8.472842541s" podCreationTimestamp="2025-12-04 18:19:34 +0000 UTC" firstStartedPulling="2025-12-04 18:19:36.373609547 +0000 UTC m=+3106.405851545" lastFinishedPulling="2025-12-04 18:19:42.011954107 +0000 UTC m=+3112.044196105" observedRunningTime="2025-12-04 18:19:42.448582823 +0000 UTC m=+3112.480824821" watchObservedRunningTime="2025-12-04 18:19:42.472842541 +0000 UTC m=+3112.505084539"
Dec 04 18:19:44 crc kubenswrapper[4631]: I1204 18:19:44.908831 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-n7bcl"
Dec 04 18:19:44 crc kubenswrapper[4631]: I1204 18:19:44.909357 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-n7bcl"
Dec 04 18:19:45 crc kubenswrapper[4631]: I1204 18:19:45.957300 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-n7bcl" podUID="a96fa3f1-bfdc-4152-9276-18bf0cadd7d6" containerName="registry-server" probeResult="failure" output=<
Dec 04 18:19:45 crc kubenswrapper[4631]: timeout: failed to connect service ":50051" within 1s
Dec 04 18:19:45 crc kubenswrapper[4631]: >
Dec 04 18:19:47 crc kubenswrapper[4631]: I1204 18:19:47.476417 4631 generic.go:334] "Generic (PLEG): container finished" podID="15251242-87d0-444d-aa7f-f0b8936efd96" containerID="eb479ef7127519123c59adc5fe0cdc48e0b087f865a406d7bbe052c7d311ebbd" exitCode=0
Dec 04 18:19:47 crc kubenswrapper[4631]: I1204 18:19:47.476527 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" event={"ID":"15251242-87d0-444d-aa7f-f0b8936efd96","Type":"ContainerDied","Data":"eb479ef7127519123c59adc5fe0cdc48e0b087f865a406d7bbe052c7d311ebbd"}
Dec 04 18:19:48 crc kubenswrapper[4631]: I1204 18:19:48.939883 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6"
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.008699 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ceilometer-compute-config-data-0\") pod \"15251242-87d0-444d-aa7f-f0b8936efd96\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") "
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.008765 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-telemetry-combined-ca-bundle\") pod \"15251242-87d0-444d-aa7f-f0b8936efd96\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") "
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.008865 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-inventory\") pod \"15251242-87d0-444d-aa7f-f0b8936efd96\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") "
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.008913 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5c8lg\" (UniqueName: \"kubernetes.io/projected/15251242-87d0-444d-aa7f-f0b8936efd96-kube-api-access-5c8lg\") pod \"15251242-87d0-444d-aa7f-f0b8936efd96\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") "
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.008971 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ceilometer-compute-config-data-1\") pod \"15251242-87d0-444d-aa7f-f0b8936efd96\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") "
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.009017 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ssh-key\") pod \"15251242-87d0-444d-aa7f-f0b8936efd96\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") "
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.009098 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ceilometer-compute-config-data-2\") pod \"15251242-87d0-444d-aa7f-f0b8936efd96\" (UID: \"15251242-87d0-444d-aa7f-f0b8936efd96\") "
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.017565 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "15251242-87d0-444d-aa7f-f0b8936efd96" (UID: "15251242-87d0-444d-aa7f-f0b8936efd96"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.028664 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15251242-87d0-444d-aa7f-f0b8936efd96-kube-api-access-5c8lg" (OuterVolumeSpecName: "kube-api-access-5c8lg") pod "15251242-87d0-444d-aa7f-f0b8936efd96" (UID: "15251242-87d0-444d-aa7f-f0b8936efd96"). InnerVolumeSpecName "kube-api-access-5c8lg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.047785 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "15251242-87d0-444d-aa7f-f0b8936efd96" (UID: "15251242-87d0-444d-aa7f-f0b8936efd96"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.057541 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-inventory" (OuterVolumeSpecName: "inventory") pod "15251242-87d0-444d-aa7f-f0b8936efd96" (UID: "15251242-87d0-444d-aa7f-f0b8936efd96"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.069587 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "15251242-87d0-444d-aa7f-f0b8936efd96" (UID: "15251242-87d0-444d-aa7f-f0b8936efd96"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.075701 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "15251242-87d0-444d-aa7f-f0b8936efd96" (UID: "15251242-87d0-444d-aa7f-f0b8936efd96"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.079919 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "15251242-87d0-444d-aa7f-f0b8936efd96" (UID: "15251242-87d0-444d-aa7f-f0b8936efd96"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.111557 4631 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-inventory\") on node \"crc\" DevicePath \"\""
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.111593 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5c8lg\" (UniqueName: \"kubernetes.io/projected/15251242-87d0-444d-aa7f-f0b8936efd96-kube-api-access-5c8lg\") on node \"crc\" DevicePath \"\""
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.111604 4631 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\""
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.111614 4631 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.111622 4631 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\""
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.111631 4631 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\""
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.111639 4631 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15251242-87d0-444d-aa7f-f0b8936efd96-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.491577 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6" event={"ID":"15251242-87d0-444d-aa7f-f0b8936efd96","Type":"ContainerDied","Data":"1ba272de113b61b2785f05a267d1badb0944205df6498b68efdfa51e4f4798e3"}
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.491881 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ba272de113b61b2785f05a267d1badb0944205df6498b68efdfa51e4f4798e3"
Dec 04 18:19:49 crc kubenswrapper[4631]: I1204 18:19:49.491641 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6"
Dec 04 18:19:51 crc kubenswrapper[4631]: I1204 18:19:51.239798 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1"
Dec 04 18:19:51 crc kubenswrapper[4631]: E1204 18:19:51.240409 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 18:19:54 crc kubenswrapper[4631]: I1204 18:19:54.952158 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-n7bcl"
Dec 04 18:19:55 crc kubenswrapper[4631]: I1204 18:19:55.017896 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-n7bcl"
Dec 04 18:19:55 crc kubenswrapper[4631]: I1204 18:19:55.195026 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n7bcl"]
Dec 04 18:19:56 crc kubenswrapper[4631]: I1204 18:19:56.559682 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-n7bcl" podUID="a96fa3f1-bfdc-4152-9276-18bf0cadd7d6" containerName="registry-server" containerID="cri-o://0bd215e17170ad8574864ac8d82f7c54d21c1b4e99ffa91cf269a3559f48a344" gracePeriod=2
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.032917 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n7bcl"
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.079247 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vpbgx\" (UniqueName: \"kubernetes.io/projected/a96fa3f1-bfdc-4152-9276-18bf0cadd7d6-kube-api-access-vpbgx\") pod \"a96fa3f1-bfdc-4152-9276-18bf0cadd7d6\" (UID: \"a96fa3f1-bfdc-4152-9276-18bf0cadd7d6\") "
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.079347 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a96fa3f1-bfdc-4152-9276-18bf0cadd7d6-catalog-content\") pod \"a96fa3f1-bfdc-4152-9276-18bf0cadd7d6\" (UID: \"a96fa3f1-bfdc-4152-9276-18bf0cadd7d6\") "
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.079556 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a96fa3f1-bfdc-4152-9276-18bf0cadd7d6-utilities\") pod \"a96fa3f1-bfdc-4152-9276-18bf0cadd7d6\" (UID: \"a96fa3f1-bfdc-4152-9276-18bf0cadd7d6\") "
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.080622 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a96fa3f1-bfdc-4152-9276-18bf0cadd7d6-utilities" (OuterVolumeSpecName: "utilities") pod "a96fa3f1-bfdc-4152-9276-18bf0cadd7d6" (UID: "a96fa3f1-bfdc-4152-9276-18bf0cadd7d6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.095284 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a96fa3f1-bfdc-4152-9276-18bf0cadd7d6-kube-api-access-vpbgx" (OuterVolumeSpecName: "kube-api-access-vpbgx") pod "a96fa3f1-bfdc-4152-9276-18bf0cadd7d6" (UID: "a96fa3f1-bfdc-4152-9276-18bf0cadd7d6"). InnerVolumeSpecName "kube-api-access-vpbgx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.182330 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vpbgx\" (UniqueName: \"kubernetes.io/projected/a96fa3f1-bfdc-4152-9276-18bf0cadd7d6-kube-api-access-vpbgx\") on node \"crc\" DevicePath \"\""
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.182365 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a96fa3f1-bfdc-4152-9276-18bf0cadd7d6-utilities\") on node \"crc\" DevicePath \"\""
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.194318 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a96fa3f1-bfdc-4152-9276-18bf0cadd7d6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a96fa3f1-bfdc-4152-9276-18bf0cadd7d6" (UID: "a96fa3f1-bfdc-4152-9276-18bf0cadd7d6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.285019 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a96fa3f1-bfdc-4152-9276-18bf0cadd7d6-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.569248 4631 generic.go:334] "Generic (PLEG): container finished" podID="a96fa3f1-bfdc-4152-9276-18bf0cadd7d6" containerID="0bd215e17170ad8574864ac8d82f7c54d21c1b4e99ffa91cf269a3559f48a344" exitCode=0
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.569293 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n7bcl" event={"ID":"a96fa3f1-bfdc-4152-9276-18bf0cadd7d6","Type":"ContainerDied","Data":"0bd215e17170ad8574864ac8d82f7c54d21c1b4e99ffa91cf269a3559f48a344"}
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.569323 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n7bcl" event={"ID":"a96fa3f1-bfdc-4152-9276-18bf0cadd7d6","Type":"ContainerDied","Data":"673f0c961c2073bdb2841937ec69b1e16c656a1705196def48111238d60b715c"}
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.569344 4631 scope.go:117] "RemoveContainer" containerID="0bd215e17170ad8574864ac8d82f7c54d21c1b4e99ffa91cf269a3559f48a344"
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.569490 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n7bcl"
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.609735 4631 scope.go:117] "RemoveContainer" containerID="ed5cd49ecc326451676a3634a44d0a799243cbcb20c834be3bbb2d9a796b788e"
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.612919 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n7bcl"]
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.624875 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-n7bcl"]
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.637738 4631 scope.go:117] "RemoveContainer" containerID="2512a79310e3f04dbea53bdd80c66bcc870a4e6258d908ccf2b408d05e5d7e7b"
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.688832 4631 scope.go:117] "RemoveContainer" containerID="0bd215e17170ad8574864ac8d82f7c54d21c1b4e99ffa91cf269a3559f48a344"
Dec 04 18:19:57 crc kubenswrapper[4631]: E1204 18:19:57.689516 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0bd215e17170ad8574864ac8d82f7c54d21c1b4e99ffa91cf269a3559f48a344\": container with ID starting with 0bd215e17170ad8574864ac8d82f7c54d21c1b4e99ffa91cf269a3559f48a344 not found: ID does not exist" containerID="0bd215e17170ad8574864ac8d82f7c54d21c1b4e99ffa91cf269a3559f48a344"
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.689556 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bd215e17170ad8574864ac8d82f7c54d21c1b4e99ffa91cf269a3559f48a344"} err="failed to get container status \"0bd215e17170ad8574864ac8d82f7c54d21c1b4e99ffa91cf269a3559f48a344\": rpc error: code = NotFound desc = could not find container \"0bd215e17170ad8574864ac8d82f7c54d21c1b4e99ffa91cf269a3559f48a344\": container with ID starting with 0bd215e17170ad8574864ac8d82f7c54d21c1b4e99ffa91cf269a3559f48a344 not found: ID does not exist"
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.689581 4631 scope.go:117] "RemoveContainer" containerID="ed5cd49ecc326451676a3634a44d0a799243cbcb20c834be3bbb2d9a796b788e"
Dec 04 18:19:57 crc kubenswrapper[4631]: E1204 18:19:57.690436 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed5cd49ecc326451676a3634a44d0a799243cbcb20c834be3bbb2d9a796b788e\": container with ID starting with ed5cd49ecc326451676a3634a44d0a799243cbcb20c834be3bbb2d9a796b788e not found: ID does not exist" containerID="ed5cd49ecc326451676a3634a44d0a799243cbcb20c834be3bbb2d9a796b788e"
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.690515 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed5cd49ecc326451676a3634a44d0a799243cbcb20c834be3bbb2d9a796b788e"} err="failed to get container status \"ed5cd49ecc326451676a3634a44d0a799243cbcb20c834be3bbb2d9a796b788e\": rpc error: code = NotFound desc = could not find container \"ed5cd49ecc326451676a3634a44d0a799243cbcb20c834be3bbb2d9a796b788e\": container with ID starting with ed5cd49ecc326451676a3634a44d0a799243cbcb20c834be3bbb2d9a796b788e not found: ID does not exist"
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.690551 4631 scope.go:117] "RemoveContainer" containerID="2512a79310e3f04dbea53bdd80c66bcc870a4e6258d908ccf2b408d05e5d7e7b"
Dec 04 18:19:57 crc kubenswrapper[4631]: E1204 18:19:57.691154 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2512a79310e3f04dbea53bdd80c66bcc870a4e6258d908ccf2b408d05e5d7e7b\": container with ID starting with 2512a79310e3f04dbea53bdd80c66bcc870a4e6258d908ccf2b408d05e5d7e7b not found: ID does not exist" containerID="2512a79310e3f04dbea53bdd80c66bcc870a4e6258d908ccf2b408d05e5d7e7b"
Dec 04 18:19:57 crc kubenswrapper[4631]: I1204 18:19:57.691178 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2512a79310e3f04dbea53bdd80c66bcc870a4e6258d908ccf2b408d05e5d7e7b"} err="failed to get container status \"2512a79310e3f04dbea53bdd80c66bcc870a4e6258d908ccf2b408d05e5d7e7b\": rpc error: code = NotFound desc = could not find container \"2512a79310e3f04dbea53bdd80c66bcc870a4e6258d908ccf2b408d05e5d7e7b\": container with ID starting with 2512a79310e3f04dbea53bdd80c66bcc870a4e6258d908ccf2b408d05e5d7e7b not found: ID does not exist"
Dec 04 18:19:58 crc kubenswrapper[4631]: I1204 18:19:58.260612 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a96fa3f1-bfdc-4152-9276-18bf0cadd7d6" path="/var/lib/kubelet/pods/a96fa3f1-bfdc-4152-9276-18bf0cadd7d6/volumes"
Dec 04 18:20:02 crc kubenswrapper[4631]: I1204 18:20:02.240732 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1"
Dec 04 18:20:02 crc kubenswrapper[4631]: E1204 18:20:02.241401 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 18:20:15 crc kubenswrapper[4631]: I1204 18:20:15.240082 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1"
Dec 04 18:20:15 crc kubenswrapper[4631]: E1204 18:20:15.240889 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 18:20:27 crc kubenswrapper[4631]: I1204 18:20:27.239962 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1"
Dec 04 18:20:27 crc kubenswrapper[4631]: E1204 18:20:27.241081 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.462892 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"]
Dec 04 18:20:39 crc kubenswrapper[4631]: E1204 18:20:39.463740 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a96fa3f1-bfdc-4152-9276-18bf0cadd7d6" containerName="extract-utilities"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.463754 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a96fa3f1-bfdc-4152-9276-18bf0cadd7d6" containerName="extract-utilities"
Dec 04 18:20:39 crc kubenswrapper[4631]: E1204 18:20:39.463773 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a96fa3f1-bfdc-4152-9276-18bf0cadd7d6" containerName="registry-server"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.463780 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a96fa3f1-bfdc-4152-9276-18bf0cadd7d6" containerName="registry-server"
Dec 04 18:20:39 crc kubenswrapper[4631]: E1204 18:20:39.463797 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15251242-87d0-444d-aa7f-f0b8936efd96" containerName="telemetry-edpm-deployment-openstack-edpm-ipam"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.463804 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="15251242-87d0-444d-aa7f-f0b8936efd96" containerName="telemetry-edpm-deployment-openstack-edpm-ipam"
Dec 04 18:20:39 crc kubenswrapper[4631]: E1204 18:20:39.463820 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a96fa3f1-bfdc-4152-9276-18bf0cadd7d6" containerName="extract-content"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.463826 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a96fa3f1-bfdc-4152-9276-18bf0cadd7d6" containerName="extract-content"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.463995 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="a96fa3f1-bfdc-4152-9276-18bf0cadd7d6" containerName="registry-server"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.464007 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="15251242-87d0-444d-aa7f-f0b8936efd96" containerName="telemetry-edpm-deployment-openstack-edpm-ipam"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.464597 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.467101 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.467643 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.467949 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-vnbgz"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.468146 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.482169 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"]
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.576531 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/36e446e6-248d-4a69-80f1-585a9bfcd4cf-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.576606 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/36e446e6-248d-4a69-80f1-585a9bfcd4cf-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.576639 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/36e446e6-248d-4a69-80f1-585a9bfcd4cf-config-data\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.576671 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/36e446e6-248d-4a69-80f1-585a9bfcd4cf-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.576896 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/36e446e6-248d-4a69-80f1-585a9bfcd4cf-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.576923 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/36e446e6-248d-4a69-80f1-585a9bfcd4cf-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.576946 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phhs8\" (UniqueName: \"kubernetes.io/projected/36e446e6-248d-4a69-80f1-585a9bfcd4cf-kube-api-access-phhs8\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.576980 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.577070 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/36e446e6-248d-4a69-80f1-585a9bfcd4cf-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.678352 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/36e446e6-248d-4a69-80f1-585a9bfcd4cf-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.678513 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/36e446e6-248d-4a69-80f1-585a9bfcd4cf-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.678559 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/36e446e6-248d-4a69-80f1-585a9bfcd4cf-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.678596 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/36e446e6-248d-4a69-80f1-585a9bfcd4cf-config-data\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.678636 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/36e446e6-248d-4a69-80f1-585a9bfcd4cf-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.678672 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/36e446e6-248d-4a69-80f1-585a9bfcd4cf-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest"
Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.678700 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName:
\"kubernetes.io/configmap/36e446e6-248d-4a69-80f1-585a9bfcd4cf-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest" Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.678732 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phhs8\" (UniqueName: \"kubernetes.io/projected/36e446e6-248d-4a69-80f1-585a9bfcd4cf-kube-api-access-phhs8\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest" Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.678801 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest" Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.679105 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/36e446e6-248d-4a69-80f1-585a9bfcd4cf-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest" Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.679488 4631 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/tempest-tests-tempest" Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.680758 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/36e446e6-248d-4a69-80f1-585a9bfcd4cf-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest" Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.681742 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/36e446e6-248d-4a69-80f1-585a9bfcd4cf-config-data\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest" Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.682733 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/36e446e6-248d-4a69-80f1-585a9bfcd4cf-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest" Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.687638 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/36e446e6-248d-4a69-80f1-585a9bfcd4cf-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest" Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.690162 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/36e446e6-248d-4a69-80f1-585a9bfcd4cf-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " 
pod="openstack/tempest-tests-tempest" Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.703147 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/36e446e6-248d-4a69-80f1-585a9bfcd4cf-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest" Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.704452 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phhs8\" (UniqueName: \"kubernetes.io/projected/36e446e6-248d-4a69-80f1-585a9bfcd4cf-kube-api-access-phhs8\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest" Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.716269 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " pod="openstack/tempest-tests-tempest" Dec 04 18:20:39 crc kubenswrapper[4631]: I1204 18:20:39.794322 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 04 18:20:40 crc kubenswrapper[4631]: I1204 18:20:40.249509 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1" Dec 04 18:20:40 crc kubenswrapper[4631]: E1204 18:20:40.250095 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:20:40 crc kubenswrapper[4631]: I1204 18:20:40.250114 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 04 18:20:41 crc kubenswrapper[4631]: I1204 18:20:41.009083 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"36e446e6-248d-4a69-80f1-585a9bfcd4cf","Type":"ContainerStarted","Data":"c372aac970b065751e0adce7b6a5fbe3583a27c496d286a0e17f781147054888"} Dec 04 18:20:54 crc kubenswrapper[4631]: I1204 18:20:54.241343 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1" Dec 04 18:20:54 crc kubenswrapper[4631]: E1204 18:20:54.242192 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:21:07 crc kubenswrapper[4631]: I1204 18:21:07.239746 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1" Dec 04 18:21:07 crc kubenswrapper[4631]: E1204 18:21:07.240582 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:21:13 crc kubenswrapper[4631]: E1204 18:21:13.507222 4631 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Dec 04 18:21:13 crc kubenswrapper[4631]: E1204 18:21:13.510519 4631 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-phhs8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]Volu
meDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(36e446e6-248d-4a69-80f1-585a9bfcd4cf): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 04 18:21:13 crc kubenswrapper[4631]: E1204 18:21:13.511868 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="36e446e6-248d-4a69-80f1-585a9bfcd4cf" Dec 04 18:21:14 crc kubenswrapper[4631]: E1204 18:21:14.349697 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="36e446e6-248d-4a69-80f1-585a9bfcd4cf" Dec 04 18:21:20 crc kubenswrapper[4631]: I1204 18:21:20.247411 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1" Dec 04 18:21:20 crc kubenswrapper[4631]: E1204 18:21:20.248220 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:21:26 crc kubenswrapper[4631]: I1204 18:21:26.669063 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 04 18:21:28 crc kubenswrapper[4631]: I1204 18:21:28.485052 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"36e446e6-248d-4a69-80f1-585a9bfcd4cf","Type":"ContainerStarted","Data":"f39fba016f1c04bf310ee5b23eb7500a5267e84cf36d997b427307f67827a4f4"} Dec 04 18:21:32 crc kubenswrapper[4631]: I1204 18:21:32.239619 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1" Dec 04 18:21:32 crc kubenswrapper[4631]: E1204 18:21:32.240509 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:21:44 crc kubenswrapper[4631]: I1204 18:21:44.240203 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1" Dec 04 18:21:44 crc kubenswrapper[4631]: I1204 18:21:44.613563 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerStarted","Data":"f77a279325343725c665491a0a3f0ab9d510d4cf2851fa6c24cd73fd7d33dbfe"} Dec 04 18:21:44 crc kubenswrapper[4631]: I1204 18:21:44.647476 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/tempest-tests-tempest" podStartSLOduration=20.239495777 podStartE2EDuration="1m6.647447424s" podCreationTimestamp="2025-12-04 18:20:38 +0000 UTC" firstStartedPulling="2025-12-04 18:20:40.259001517 +0000 UTC m=+3170.291243515" lastFinishedPulling="2025-12-04 18:21:26.666953164 +0000 UTC m=+3216.699195162" observedRunningTime="2025-12-04 18:21:28.505701822 +0000 UTC m=+3218.537943820" watchObservedRunningTime="2025-12-04 18:21:44.647447424 +0000 UTC m=+3234.679689422" Dec 04 18:23:01 crc kubenswrapper[4631]: I1204 18:23:01.579470 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pxbcf"] Dec 04 18:23:01 crc kubenswrapper[4631]: I1204 18:23:01.585187 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pxbcf" Dec 04 18:23:01 crc kubenswrapper[4631]: I1204 18:23:01.598605 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pxbcf"] Dec 04 18:23:01 crc kubenswrapper[4631]: I1204 18:23:01.737382 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cba6dd8-e8de-492c-b25f-f7092a13c72f-catalog-content\") pod \"certified-operators-pxbcf\" (UID: \"2cba6dd8-e8de-492c-b25f-f7092a13c72f\") " pod="openshift-marketplace/certified-operators-pxbcf" Dec 04 18:23:01 crc kubenswrapper[4631]: I1204 18:23:01.737466 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cba6dd8-e8de-492c-b25f-f7092a13c72f-utilities\") pod \"certified-operators-pxbcf\" (UID: \"2cba6dd8-e8de-492c-b25f-f7092a13c72f\") " pod="openshift-marketplace/certified-operators-pxbcf" Dec 04 18:23:01 crc kubenswrapper[4631]: I1204 18:23:01.737860 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbncx\" (UniqueName: \"kubernetes.io/projected/2cba6dd8-e8de-492c-b25f-f7092a13c72f-kube-api-access-sbncx\") pod \"certified-operators-pxbcf\" (UID: \"2cba6dd8-e8de-492c-b25f-f7092a13c72f\") " pod="openshift-marketplace/certified-operators-pxbcf" Dec 04 18:23:01 crc kubenswrapper[4631]: I1204 18:23:01.840277 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cba6dd8-e8de-492c-b25f-f7092a13c72f-catalog-content\") pod \"certified-operators-pxbcf\" (UID: \"2cba6dd8-e8de-492c-b25f-f7092a13c72f\") " pod="openshift-marketplace/certified-operators-pxbcf" Dec 04 18:23:01 crc kubenswrapper[4631]: I1204 18:23:01.840681 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cba6dd8-e8de-492c-b25f-f7092a13c72f-utilities\") pod \"certified-operators-pxbcf\" (UID: \"2cba6dd8-e8de-492c-b25f-f7092a13c72f\") " pod="openshift-marketplace/certified-operators-pxbcf" Dec 04 18:23:01 crc kubenswrapper[4631]: I1204 18:23:01.840848 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cba6dd8-e8de-492c-b25f-f7092a13c72f-catalog-content\") pod \"certified-operators-pxbcf\" (UID: \"2cba6dd8-e8de-492c-b25f-f7092a13c72f\") " pod="openshift-marketplace/certified-operators-pxbcf" Dec 04 18:23:01 crc kubenswrapper[4631]: I1204 18:23:01.841043 4631 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-sbncx\" (UniqueName: \"kubernetes.io/projected/2cba6dd8-e8de-492c-b25f-f7092a13c72f-kube-api-access-sbncx\") pod \"certified-operators-pxbcf\" (UID: \"2cba6dd8-e8de-492c-b25f-f7092a13c72f\") " pod="openshift-marketplace/certified-operators-pxbcf" Dec 04 18:23:01 crc kubenswrapper[4631]: I1204 18:23:01.841119 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cba6dd8-e8de-492c-b25f-f7092a13c72f-utilities\") pod \"certified-operators-pxbcf\" (UID: \"2cba6dd8-e8de-492c-b25f-f7092a13c72f\") " pod="openshift-marketplace/certified-operators-pxbcf" Dec 04 18:23:01 crc kubenswrapper[4631]: I1204 18:23:01.866811 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbncx\" (UniqueName: \"kubernetes.io/projected/2cba6dd8-e8de-492c-b25f-f7092a13c72f-kube-api-access-sbncx\") pod \"certified-operators-pxbcf\" (UID: \"2cba6dd8-e8de-492c-b25f-f7092a13c72f\") " pod="openshift-marketplace/certified-operators-pxbcf" Dec 04 18:23:01 crc kubenswrapper[4631]: I1204 18:23:01.904210 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pxbcf" Dec 04 18:23:02 crc kubenswrapper[4631]: I1204 18:23:02.712144 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pxbcf"] Dec 04 18:23:03 crc kubenswrapper[4631]: I1204 18:23:03.518525 4631 generic.go:334] "Generic (PLEG): container finished" podID="2cba6dd8-e8de-492c-b25f-f7092a13c72f" containerID="a6947f0d8246cb1a1ba6999a365408b664c98a704c7c32ad7f06b3a8af2fbd6d" exitCode=0 Dec 04 18:23:03 crc kubenswrapper[4631]: I1204 18:23:03.518618 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pxbcf" event={"ID":"2cba6dd8-e8de-492c-b25f-f7092a13c72f","Type":"ContainerDied","Data":"a6947f0d8246cb1a1ba6999a365408b664c98a704c7c32ad7f06b3a8af2fbd6d"} Dec 04 18:23:03 crc kubenswrapper[4631]: I1204 18:23:03.518784 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pxbcf" event={"ID":"2cba6dd8-e8de-492c-b25f-f7092a13c72f","Type":"ContainerStarted","Data":"403c8f397f3d8d24d3ab23300a3d2dd2b70eacb1005bacf5d8467bf6eb6cf92c"} Dec 04 18:23:09 crc kubenswrapper[4631]: I1204 18:23:09.570905 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pxbcf" event={"ID":"2cba6dd8-e8de-492c-b25f-f7092a13c72f","Type":"ContainerStarted","Data":"4599f9337c534c4feddb41c169f8dc97dbcbc3b3a8cea4e6e10f68c882d215be"} Dec 04 18:23:10 crc kubenswrapper[4631]: I1204 18:23:10.580660 4631 generic.go:334] "Generic (PLEG): container finished" podID="2cba6dd8-e8de-492c-b25f-f7092a13c72f" containerID="4599f9337c534c4feddb41c169f8dc97dbcbc3b3a8cea4e6e10f68c882d215be" exitCode=0 Dec 04 18:23:10 crc kubenswrapper[4631]: I1204 18:23:10.580800 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pxbcf" event={"ID":"2cba6dd8-e8de-492c-b25f-f7092a13c72f","Type":"ContainerDied","Data":"4599f9337c534c4feddb41c169f8dc97dbcbc3b3a8cea4e6e10f68c882d215be"} Dec 04 18:23:12 crc kubenswrapper[4631]: I1204 18:23:12.602659 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pxbcf" 
event={"ID":"2cba6dd8-e8de-492c-b25f-f7092a13c72f","Type":"ContainerStarted","Data":"0ddc6fb9aa094deffafea8554d4d77f68a8e690131dc2fd2ebb9e3e0e869f5cb"} Dec 04 18:23:12 crc kubenswrapper[4631]: I1204 18:23:12.620897 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-pxbcf" podStartSLOduration=3.568284067 podStartE2EDuration="11.620874554s" podCreationTimestamp="2025-12-04 18:23:01 +0000 UTC" firstStartedPulling="2025-12-04 18:23:03.521261412 +0000 UTC m=+3313.553503440" lastFinishedPulling="2025-12-04 18:23:11.573851929 +0000 UTC m=+3321.606093927" observedRunningTime="2025-12-04 18:23:12.618703393 +0000 UTC m=+3322.650945391" watchObservedRunningTime="2025-12-04 18:23:12.620874554 +0000 UTC m=+3322.653116552" Dec 04 18:23:21 crc kubenswrapper[4631]: I1204 18:23:21.904525 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-pxbcf" Dec 04 18:23:21 crc kubenswrapper[4631]: I1204 18:23:21.905147 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-pxbcf" Dec 04 18:23:21 crc kubenswrapper[4631]: I1204 18:23:21.960233 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-pxbcf" Dec 04 18:23:22 crc kubenswrapper[4631]: I1204 18:23:22.733550 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-pxbcf" Dec 04 18:23:23 crc kubenswrapper[4631]: I1204 18:23:23.070950 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pxbcf"] Dec 04 18:23:23 crc kubenswrapper[4631]: I1204 18:23:23.142485 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q95bk"] Dec 04 18:23:23 crc kubenswrapper[4631]: I1204 18:23:23.142732 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-q95bk" podUID="0f3f50d4-2cbd-4b36-b6b4-00e312badb0f" containerName="registry-server" containerID="cri-o://772b862120ba7a4d831f3f9e65c7226051aa5847714389f145eef80a487064c1" gracePeriod=2 Dec 04 18:23:23 crc kubenswrapper[4631]: E1204 18:23:23.559543 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 772b862120ba7a4d831f3f9e65c7226051aa5847714389f145eef80a487064c1 is running failed: container process not found" containerID="772b862120ba7a4d831f3f9e65c7226051aa5847714389f145eef80a487064c1" cmd=["grpc_health_probe","-addr=:50051"] Dec 04 18:23:23 crc kubenswrapper[4631]: E1204 18:23:23.561244 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 772b862120ba7a4d831f3f9e65c7226051aa5847714389f145eef80a487064c1 is running failed: container process not found" containerID="772b862120ba7a4d831f3f9e65c7226051aa5847714389f145eef80a487064c1" cmd=["grpc_health_probe","-addr=:50051"] Dec 04 18:23:23 crc kubenswrapper[4631]: E1204 18:23:23.574662 4631 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 772b862120ba7a4d831f3f9e65c7226051aa5847714389f145eef80a487064c1 is running failed: container process not found" 
containerID="772b862120ba7a4d831f3f9e65c7226051aa5847714389f145eef80a487064c1" cmd=["grpc_health_probe","-addr=:50051"] Dec 04 18:23:23 crc kubenswrapper[4631]: E1204 18:23:23.574720 4631 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 772b862120ba7a4d831f3f9e65c7226051aa5847714389f145eef80a487064c1 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-q95bk" podUID="0f3f50d4-2cbd-4b36-b6b4-00e312badb0f" containerName="registry-server" Dec 04 18:23:23 crc kubenswrapper[4631]: I1204 18:23:23.705476 4631 generic.go:334] "Generic (PLEG): container finished" podID="0f3f50d4-2cbd-4b36-b6b4-00e312badb0f" containerID="772b862120ba7a4d831f3f9e65c7226051aa5847714389f145eef80a487064c1" exitCode=0 Dec 04 18:23:23 crc kubenswrapper[4631]: I1204 18:23:23.705589 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q95bk" event={"ID":"0f3f50d4-2cbd-4b36-b6b4-00e312badb0f","Type":"ContainerDied","Data":"772b862120ba7a4d831f3f9e65c7226051aa5847714389f145eef80a487064c1"} Dec 04 18:23:24 crc kubenswrapper[4631]: I1204 18:23:24.012625 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q95bk" Dec 04 18:23:24 crc kubenswrapper[4631]: I1204 18:23:24.117889 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f3f50d4-2cbd-4b36-b6b4-00e312badb0f-utilities\") pod \"0f3f50d4-2cbd-4b36-b6b4-00e312badb0f\" (UID: \"0f3f50d4-2cbd-4b36-b6b4-00e312badb0f\") " Dec 04 18:23:24 crc kubenswrapper[4631]: I1204 18:23:24.118159 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j4v8c\" (UniqueName: \"kubernetes.io/projected/0f3f50d4-2cbd-4b36-b6b4-00e312badb0f-kube-api-access-j4v8c\") pod \"0f3f50d4-2cbd-4b36-b6b4-00e312badb0f\" (UID: \"0f3f50d4-2cbd-4b36-b6b4-00e312badb0f\") " Dec 04 18:23:24 crc kubenswrapper[4631]: I1204 18:23:24.118206 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f3f50d4-2cbd-4b36-b6b4-00e312badb0f-catalog-content\") pod \"0f3f50d4-2cbd-4b36-b6b4-00e312badb0f\" (UID: \"0f3f50d4-2cbd-4b36-b6b4-00e312badb0f\") " Dec 04 18:23:24 crc kubenswrapper[4631]: I1204 18:23:24.119536 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f3f50d4-2cbd-4b36-b6b4-00e312badb0f-utilities" (OuterVolumeSpecName: "utilities") pod "0f3f50d4-2cbd-4b36-b6b4-00e312badb0f" (UID: "0f3f50d4-2cbd-4b36-b6b4-00e312badb0f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:23:24 crc kubenswrapper[4631]: I1204 18:23:24.132934 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f3f50d4-2cbd-4b36-b6b4-00e312badb0f-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 18:23:24 crc kubenswrapper[4631]: I1204 18:23:24.188938 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f3f50d4-2cbd-4b36-b6b4-00e312badb0f-kube-api-access-j4v8c" (OuterVolumeSpecName: "kube-api-access-j4v8c") pod "0f3f50d4-2cbd-4b36-b6b4-00e312badb0f" (UID: "0f3f50d4-2cbd-4b36-b6b4-00e312badb0f"). InnerVolumeSpecName "kube-api-access-j4v8c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:23:24 crc kubenswrapper[4631]: I1204 18:23:24.237643 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j4v8c\" (UniqueName: \"kubernetes.io/projected/0f3f50d4-2cbd-4b36-b6b4-00e312badb0f-kube-api-access-j4v8c\") on node \"crc\" DevicePath \"\"" Dec 04 18:23:24 crc kubenswrapper[4631]: I1204 18:23:24.239908 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f3f50d4-2cbd-4b36-b6b4-00e312badb0f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0f3f50d4-2cbd-4b36-b6b4-00e312badb0f" (UID: "0f3f50d4-2cbd-4b36-b6b4-00e312badb0f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:23:24 crc kubenswrapper[4631]: I1204 18:23:24.340065 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f3f50d4-2cbd-4b36-b6b4-00e312badb0f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 18:23:24 crc kubenswrapper[4631]: I1204 18:23:24.716458 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q95bk" Dec 04 18:23:24 crc kubenswrapper[4631]: I1204 18:23:24.716451 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q95bk" event={"ID":"0f3f50d4-2cbd-4b36-b6b4-00e312badb0f","Type":"ContainerDied","Data":"7ddd6637e8e9ab3509a6c4960b38b4c7c3aa5e479234915a2210d1f1a7d9600f"} Dec 04 18:23:24 crc kubenswrapper[4631]: I1204 18:23:24.717071 4631 scope.go:117] "RemoveContainer" containerID="772b862120ba7a4d831f3f9e65c7226051aa5847714389f145eef80a487064c1" Dec 04 18:23:24 crc kubenswrapper[4631]: I1204 18:23:24.743731 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q95bk"] Dec 04 18:23:24 crc kubenswrapper[4631]: I1204 18:23:24.755911 4631 scope.go:117] "RemoveContainer" containerID="09c00272e91fe577a4291302ed2699f82eaa5e0f5aea95a43aefadbea437e69c" Dec 04 18:23:24 crc kubenswrapper[4631]: I1204 18:23:24.757041 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-q95bk"] Dec 04 18:23:24 crc kubenswrapper[4631]: I1204 18:23:24.783171 4631 scope.go:117] "RemoveContainer" containerID="d865191746326baf2ae83e1a6fb20c384e057bc0c507ca3402ecef81e5d70d51" Dec 04 18:23:26 crc kubenswrapper[4631]: I1204 18:23:26.249163 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f3f50d4-2cbd-4b36-b6b4-00e312badb0f" path="/var/lib/kubelet/pods/0f3f50d4-2cbd-4b36-b6b4-00e312badb0f/volumes" Dec 04 18:24:06 crc kubenswrapper[4631]: I1204 18:24:06.022411 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:24:06 crc kubenswrapper[4631]: I1204 18:24:06.022940 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 18:24:36 crc kubenswrapper[4631]: I1204 18:24:36.023087 4631 patch_prober.go:28] interesting 
pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:24:36 crc kubenswrapper[4631]: I1204 18:24:36.023682 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 18:25:06 crc kubenswrapper[4631]: I1204 18:25:06.022449 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:25:06 crc kubenswrapper[4631]: I1204 18:25:06.024291 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 18:25:06 crc kubenswrapper[4631]: I1204 18:25:06.024509 4631 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 18:25:06 crc kubenswrapper[4631]: I1204 18:25:06.026119 4631 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f77a279325343725c665491a0a3f0ab9d510d4cf2851fa6c24cd73fd7d33dbfe"} pod="openshift-machine-config-operator/machine-config-daemon-q27wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 18:25:06 crc kubenswrapper[4631]: I1204 18:25:06.026363 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" containerID="cri-o://f77a279325343725c665491a0a3f0ab9d510d4cf2851fa6c24cd73fd7d33dbfe" gracePeriod=600 Dec 04 18:25:06 crc kubenswrapper[4631]: I1204 18:25:06.624332 4631 generic.go:334] "Generic (PLEG): container finished" podID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerID="f77a279325343725c665491a0a3f0ab9d510d4cf2851fa6c24cd73fd7d33dbfe" exitCode=0 Dec 04 18:25:06 crc kubenswrapper[4631]: I1204 18:25:06.624401 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerDied","Data":"f77a279325343725c665491a0a3f0ab9d510d4cf2851fa6c24cd73fd7d33dbfe"} Dec 04 18:25:06 crc kubenswrapper[4631]: I1204 18:25:06.624718 4631 scope.go:117] "RemoveContainer" containerID="5d72a7d21ec1c2dc43759189859ed16e63dac409a35eccb7d8c0aa5bda3025c1" Dec 04 18:25:07 crc kubenswrapper[4631]: I1204 18:25:07.635500 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerStarted","Data":"ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa"} Dec 04 18:25:49 
crc kubenswrapper[4631]: I1204 18:25:49.929720 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7q8lz"] Dec 04 18:25:49 crc kubenswrapper[4631]: E1204 18:25:49.930686 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f3f50d4-2cbd-4b36-b6b4-00e312badb0f" containerName="extract-utilities" Dec 04 18:25:49 crc kubenswrapper[4631]: I1204 18:25:49.930700 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f3f50d4-2cbd-4b36-b6b4-00e312badb0f" containerName="extract-utilities" Dec 04 18:25:49 crc kubenswrapper[4631]: E1204 18:25:49.930728 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f3f50d4-2cbd-4b36-b6b4-00e312badb0f" containerName="registry-server" Dec 04 18:25:49 crc kubenswrapper[4631]: I1204 18:25:49.930734 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f3f50d4-2cbd-4b36-b6b4-00e312badb0f" containerName="registry-server" Dec 04 18:25:49 crc kubenswrapper[4631]: E1204 18:25:49.930741 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f3f50d4-2cbd-4b36-b6b4-00e312badb0f" containerName="extract-content" Dec 04 18:25:49 crc kubenswrapper[4631]: I1204 18:25:49.930747 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f3f50d4-2cbd-4b36-b6b4-00e312badb0f" containerName="extract-content" Dec 04 18:25:49 crc kubenswrapper[4631]: I1204 18:25:49.930948 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f3f50d4-2cbd-4b36-b6b4-00e312badb0f" containerName="registry-server" Dec 04 18:25:49 crc kubenswrapper[4631]: I1204 18:25:49.934910 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7q8lz" Dec 04 18:25:49 crc kubenswrapper[4631]: I1204 18:25:49.939454 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7q8lz"] Dec 04 18:25:50 crc kubenswrapper[4631]: I1204 18:25:50.023055 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fc28468-5d48-42b5-b85a-7d27cdd9fc4b-utilities\") pod \"redhat-marketplace-7q8lz\" (UID: \"0fc28468-5d48-42b5-b85a-7d27cdd9fc4b\") " pod="openshift-marketplace/redhat-marketplace-7q8lz" Dec 04 18:25:50 crc kubenswrapper[4631]: I1204 18:25:50.023111 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fc28468-5d48-42b5-b85a-7d27cdd9fc4b-catalog-content\") pod \"redhat-marketplace-7q8lz\" (UID: \"0fc28468-5d48-42b5-b85a-7d27cdd9fc4b\") " pod="openshift-marketplace/redhat-marketplace-7q8lz" Dec 04 18:25:50 crc kubenswrapper[4631]: I1204 18:25:50.023185 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nr74\" (UniqueName: \"kubernetes.io/projected/0fc28468-5d48-42b5-b85a-7d27cdd9fc4b-kube-api-access-9nr74\") pod \"redhat-marketplace-7q8lz\" (UID: \"0fc28468-5d48-42b5-b85a-7d27cdd9fc4b\") " pod="openshift-marketplace/redhat-marketplace-7q8lz" Dec 04 18:25:50 crc kubenswrapper[4631]: I1204 18:25:50.124404 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fc28468-5d48-42b5-b85a-7d27cdd9fc4b-catalog-content\") pod \"redhat-marketplace-7q8lz\" (UID: \"0fc28468-5d48-42b5-b85a-7d27cdd9fc4b\") " pod="openshift-marketplace/redhat-marketplace-7q8lz" 
Dec 04 18:25:50 crc kubenswrapper[4631]: I1204 18:25:50.124501 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nr74\" (UniqueName: \"kubernetes.io/projected/0fc28468-5d48-42b5-b85a-7d27cdd9fc4b-kube-api-access-9nr74\") pod \"redhat-marketplace-7q8lz\" (UID: \"0fc28468-5d48-42b5-b85a-7d27cdd9fc4b\") " pod="openshift-marketplace/redhat-marketplace-7q8lz"
Dec 04 18:25:50 crc kubenswrapper[4631]: I1204 18:25:50.124602 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fc28468-5d48-42b5-b85a-7d27cdd9fc4b-utilities\") pod \"redhat-marketplace-7q8lz\" (UID: \"0fc28468-5d48-42b5-b85a-7d27cdd9fc4b\") " pod="openshift-marketplace/redhat-marketplace-7q8lz"
Dec 04 18:25:50 crc kubenswrapper[4631]: I1204 18:25:50.124984 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fc28468-5d48-42b5-b85a-7d27cdd9fc4b-catalog-content\") pod \"redhat-marketplace-7q8lz\" (UID: \"0fc28468-5d48-42b5-b85a-7d27cdd9fc4b\") " pod="openshift-marketplace/redhat-marketplace-7q8lz"
Dec 04 18:25:50 crc kubenswrapper[4631]: I1204 18:25:50.125016 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fc28468-5d48-42b5-b85a-7d27cdd9fc4b-utilities\") pod \"redhat-marketplace-7q8lz\" (UID: \"0fc28468-5d48-42b5-b85a-7d27cdd9fc4b\") " pod="openshift-marketplace/redhat-marketplace-7q8lz"
Dec 04 18:25:50 crc kubenswrapper[4631]: I1204 18:25:50.154575 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nr74\" (UniqueName: \"kubernetes.io/projected/0fc28468-5d48-42b5-b85a-7d27cdd9fc4b-kube-api-access-9nr74\") pod \"redhat-marketplace-7q8lz\" (UID: \"0fc28468-5d48-42b5-b85a-7d27cdd9fc4b\") " pod="openshift-marketplace/redhat-marketplace-7q8lz"
Dec 04 18:25:50 crc kubenswrapper[4631]: I1204 18:25:50.301323 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7q8lz"
Dec 04 18:25:51 crc kubenswrapper[4631]: I1204 18:25:51.048732 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7q8lz"]
Dec 04 18:25:51 crc kubenswrapper[4631]: I1204 18:25:51.328015 4631 generic.go:334] "Generic (PLEG): container finished" podID="0fc28468-5d48-42b5-b85a-7d27cdd9fc4b" containerID="2488971c0631d4053e4335affb909b035e44590bc7992d99c796fcf38c9ba3fa" exitCode=0
Dec 04 18:25:51 crc kubenswrapper[4631]: I1204 18:25:51.328320 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7q8lz" event={"ID":"0fc28468-5d48-42b5-b85a-7d27cdd9fc4b","Type":"ContainerDied","Data":"2488971c0631d4053e4335affb909b035e44590bc7992d99c796fcf38c9ba3fa"}
Dec 04 18:25:51 crc kubenswrapper[4631]: I1204 18:25:51.328349 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7q8lz" event={"ID":"0fc28468-5d48-42b5-b85a-7d27cdd9fc4b","Type":"ContainerStarted","Data":"398948ef7d54697c5195ffe2b63356834a929a0b5113311503c1cb2bb70a911c"}
Dec 04 18:25:51 crc kubenswrapper[4631]: I1204 18:25:51.331949 4631 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 04 18:25:52 crc kubenswrapper[4631]: I1204 18:25:52.339003 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7q8lz" event={"ID":"0fc28468-5d48-42b5-b85a-7d27cdd9fc4b","Type":"ContainerStarted","Data":"e6a866f6d828b5cd9346fc33f45689e0df39ce6f3c578cae2edd291f1f478c0d"}
Dec 04 18:25:53 crc kubenswrapper[4631]: I1204 18:25:53.380170 4631 generic.go:334] "Generic (PLEG): container finished" podID="0fc28468-5d48-42b5-b85a-7d27cdd9fc4b" containerID="e6a866f6d828b5cd9346fc33f45689e0df39ce6f3c578cae2edd291f1f478c0d" exitCode=0
Dec 04 18:25:53 crc kubenswrapper[4631]: I1204 18:25:53.380475 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7q8lz" event={"ID":"0fc28468-5d48-42b5-b85a-7d27cdd9fc4b","Type":"ContainerDied","Data":"e6a866f6d828b5cd9346fc33f45689e0df39ce6f3c578cae2edd291f1f478c0d"}
Dec 04 18:25:54 crc kubenswrapper[4631]: I1204 18:25:54.389537 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7q8lz" event={"ID":"0fc28468-5d48-42b5-b85a-7d27cdd9fc4b","Type":"ContainerStarted","Data":"247f2fc0ddd3229d2aef84cd3d9bf60e900f8504e7a34713579e33939cbe89bb"}
Dec 04 18:25:54 crc kubenswrapper[4631]: I1204 18:25:54.436762 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7q8lz" podStartSLOduration=2.981483276 podStartE2EDuration="5.436747272s" podCreationTimestamp="2025-12-04 18:25:49 +0000 UTC" firstStartedPulling="2025-12-04 18:25:51.331739157 +0000 UTC m=+3481.363981155" lastFinishedPulling="2025-12-04 18:25:53.787003153 +0000 UTC m=+3483.819245151" observedRunningTime="2025-12-04 18:25:54.42972447 +0000 UTC m=+3484.461966468" watchObservedRunningTime="2025-12-04 18:25:54.436747272 +0000 UTC m=+3484.468989270"
Dec 04 18:26:00 crc kubenswrapper[4631]: I1204 18:26:00.302708 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7q8lz"
Dec 04 18:26:00 crc kubenswrapper[4631]: I1204 18:26:00.303389 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7q8lz"
Dec 04 18:26:00 crc kubenswrapper[4631]: I1204 18:26:00.354699 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7q8lz"
Dec 04 18:26:00 crc kubenswrapper[4631]: I1204 18:26:00.489864 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7q8lz"
Dec 04 18:26:05 crc kubenswrapper[4631]: I1204 18:26:05.286597 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7q8lz"]
Dec 04 18:26:05 crc kubenswrapper[4631]: I1204 18:26:05.287227 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7q8lz" podUID="0fc28468-5d48-42b5-b85a-7d27cdd9fc4b" containerName="registry-server" containerID="cri-o://247f2fc0ddd3229d2aef84cd3d9bf60e900f8504e7a34713579e33939cbe89bb" gracePeriod=2
Dec 04 18:26:05 crc kubenswrapper[4631]: I1204 18:26:05.483403 4631 generic.go:334] "Generic (PLEG): container finished" podID="0fc28468-5d48-42b5-b85a-7d27cdd9fc4b" containerID="247f2fc0ddd3229d2aef84cd3d9bf60e900f8504e7a34713579e33939cbe89bb" exitCode=0
Dec 04 18:26:05 crc kubenswrapper[4631]: I1204 18:26:05.483479 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7q8lz" event={"ID":"0fc28468-5d48-42b5-b85a-7d27cdd9fc4b","Type":"ContainerDied","Data":"247f2fc0ddd3229d2aef84cd3d9bf60e900f8504e7a34713579e33939cbe89bb"}
Dec 04 18:26:05 crc kubenswrapper[4631]: I1204 18:26:05.867456 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7q8lz"
Dec 04 18:26:05 crc kubenswrapper[4631]: I1204 18:26:05.936106 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9nr74\" (UniqueName: \"kubernetes.io/projected/0fc28468-5d48-42b5-b85a-7d27cdd9fc4b-kube-api-access-9nr74\") pod \"0fc28468-5d48-42b5-b85a-7d27cdd9fc4b\" (UID: \"0fc28468-5d48-42b5-b85a-7d27cdd9fc4b\") "
Dec 04 18:26:05 crc kubenswrapper[4631]: I1204 18:26:05.936334 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fc28468-5d48-42b5-b85a-7d27cdd9fc4b-utilities\") pod \"0fc28468-5d48-42b5-b85a-7d27cdd9fc4b\" (UID: \"0fc28468-5d48-42b5-b85a-7d27cdd9fc4b\") "
Dec 04 18:26:05 crc kubenswrapper[4631]: I1204 18:26:05.936395 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fc28468-5d48-42b5-b85a-7d27cdd9fc4b-catalog-content\") pod \"0fc28468-5d48-42b5-b85a-7d27cdd9fc4b\" (UID: \"0fc28468-5d48-42b5-b85a-7d27cdd9fc4b\") "
Dec 04 18:26:05 crc kubenswrapper[4631]: I1204 18:26:05.937302 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fc28468-5d48-42b5-b85a-7d27cdd9fc4b-utilities" (OuterVolumeSpecName: "utilities") pod "0fc28468-5d48-42b5-b85a-7d27cdd9fc4b" (UID: "0fc28468-5d48-42b5-b85a-7d27cdd9fc4b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 18:26:05 crc kubenswrapper[4631]: I1204 18:26:05.937828 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fc28468-5d48-42b5-b85a-7d27cdd9fc4b-utilities\") on node \"crc\" DevicePath \"\""
Dec 04 18:26:05 crc kubenswrapper[4631]: I1204 18:26:05.945284 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fc28468-5d48-42b5-b85a-7d27cdd9fc4b-kube-api-access-9nr74" (OuterVolumeSpecName: "kube-api-access-9nr74") pod "0fc28468-5d48-42b5-b85a-7d27cdd9fc4b" (UID: "0fc28468-5d48-42b5-b85a-7d27cdd9fc4b"). InnerVolumeSpecName "kube-api-access-9nr74". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 18:26:05 crc kubenswrapper[4631]: I1204 18:26:05.963255 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fc28468-5d48-42b5-b85a-7d27cdd9fc4b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0fc28468-5d48-42b5-b85a-7d27cdd9fc4b" (UID: "0fc28468-5d48-42b5-b85a-7d27cdd9fc4b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 18:26:06 crc kubenswrapper[4631]: I1204 18:26:06.039668 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fc28468-5d48-42b5-b85a-7d27cdd9fc4b-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 04 18:26:06 crc kubenswrapper[4631]: I1204 18:26:06.039708 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9nr74\" (UniqueName: \"kubernetes.io/projected/0fc28468-5d48-42b5-b85a-7d27cdd9fc4b-kube-api-access-9nr74\") on node \"crc\" DevicePath \"\""
Dec 04 18:26:06 crc kubenswrapper[4631]: I1204 18:26:06.493517 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7q8lz" event={"ID":"0fc28468-5d48-42b5-b85a-7d27cdd9fc4b","Type":"ContainerDied","Data":"398948ef7d54697c5195ffe2b63356834a929a0b5113311503c1cb2bb70a911c"}
Dec 04 18:26:06 crc kubenswrapper[4631]: I1204 18:26:06.493568 4631 scope.go:117] "RemoveContainer" containerID="247f2fc0ddd3229d2aef84cd3d9bf60e900f8504e7a34713579e33939cbe89bb"
Dec 04 18:26:06 crc kubenswrapper[4631]: I1204 18:26:06.493579 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7q8lz"
Dec 04 18:26:06 crc kubenswrapper[4631]: I1204 18:26:06.524603 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7q8lz"]
Dec 04 18:26:06 crc kubenswrapper[4631]: I1204 18:26:06.524691 4631 scope.go:117] "RemoveContainer" containerID="e6a866f6d828b5cd9346fc33f45689e0df39ce6f3c578cae2edd291f1f478c0d"
Dec 04 18:26:06 crc kubenswrapper[4631]: I1204 18:26:06.533696 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7q8lz"]
Dec 04 18:26:06 crc kubenswrapper[4631]: I1204 18:26:06.544188 4631 scope.go:117] "RemoveContainer" containerID="2488971c0631d4053e4335affb909b035e44590bc7992d99c796fcf38c9ba3fa"
Dec 04 18:26:08 crc kubenswrapper[4631]: I1204 18:26:08.254004 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fc28468-5d48-42b5-b85a-7d27cdd9fc4b" path="/var/lib/kubelet/pods/0fc28468-5d48-42b5-b85a-7d27cdd9fc4b/volumes"
Dec 04 18:27:36 crc kubenswrapper[4631]: I1204 18:27:36.023257 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 18:27:36 crc kubenswrapper[4631]: I1204 18:27:36.023822 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 18:27:41 crc kubenswrapper[4631]: I1204 18:27:41.757481 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-c29tj"]
Dec 04 18:27:41 crc kubenswrapper[4631]: E1204 18:27:41.761526 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fc28468-5d48-42b5-b85a-7d27cdd9fc4b" containerName="extract-content"
Dec 04 18:27:41 crc kubenswrapper[4631]: I1204 18:27:41.761576 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fc28468-5d48-42b5-b85a-7d27cdd9fc4b" containerName="extract-content"
Dec 04 18:27:41 crc kubenswrapper[4631]: E1204 18:27:41.761615 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fc28468-5d48-42b5-b85a-7d27cdd9fc4b" containerName="extract-utilities"
Dec 04 18:27:41 crc kubenswrapper[4631]: I1204 18:27:41.761624 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fc28468-5d48-42b5-b85a-7d27cdd9fc4b" containerName="extract-utilities"
Dec 04 18:27:41 crc kubenswrapper[4631]: E1204 18:27:41.761663 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fc28468-5d48-42b5-b85a-7d27cdd9fc4b" containerName="registry-server"
Dec 04 18:27:41 crc kubenswrapper[4631]: I1204 18:27:41.761671 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fc28468-5d48-42b5-b85a-7d27cdd9fc4b" containerName="registry-server"
Dec 04 18:27:41 crc kubenswrapper[4631]: I1204 18:27:41.762186 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fc28468-5d48-42b5-b85a-7d27cdd9fc4b" containerName="registry-server"
Dec 04 18:27:41 crc kubenswrapper[4631]: I1204 18:27:41.782161 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c29tj"
Dec 04 18:27:41 crc kubenswrapper[4631]: I1204 18:27:41.845602 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c29tj"]
Dec 04 18:27:41 crc kubenswrapper[4631]: I1204 18:27:41.855930 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acce1e1a-bb46-44c9-8383-3c6dac703a36-catalog-content\") pod \"community-operators-c29tj\" (UID: \"acce1e1a-bb46-44c9-8383-3c6dac703a36\") " pod="openshift-marketplace/community-operators-c29tj"
Dec 04 18:27:41 crc kubenswrapper[4631]: I1204 18:27:41.856075 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzxp7\" (UniqueName: \"kubernetes.io/projected/acce1e1a-bb46-44c9-8383-3c6dac703a36-kube-api-access-xzxp7\") pod \"community-operators-c29tj\" (UID: \"acce1e1a-bb46-44c9-8383-3c6dac703a36\") " pod="openshift-marketplace/community-operators-c29tj"
Dec 04 18:27:41 crc kubenswrapper[4631]: I1204 18:27:41.856125 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acce1e1a-bb46-44c9-8383-3c6dac703a36-utilities\") pod \"community-operators-c29tj\" (UID: \"acce1e1a-bb46-44c9-8383-3c6dac703a36\") " pod="openshift-marketplace/community-operators-c29tj"
Dec 04 18:27:41 crc kubenswrapper[4631]: I1204 18:27:41.958720 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzxp7\" (UniqueName: \"kubernetes.io/projected/acce1e1a-bb46-44c9-8383-3c6dac703a36-kube-api-access-xzxp7\") pod \"community-operators-c29tj\" (UID: \"acce1e1a-bb46-44c9-8383-3c6dac703a36\") " pod="openshift-marketplace/community-operators-c29tj"
Dec 04 18:27:41 crc kubenswrapper[4631]: I1204 18:27:41.959338 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acce1e1a-bb46-44c9-8383-3c6dac703a36-utilities\") pod \"community-operators-c29tj\" (UID: \"acce1e1a-bb46-44c9-8383-3c6dac703a36\") " pod="openshift-marketplace/community-operators-c29tj"
Dec 04 18:27:41 crc kubenswrapper[4631]: I1204 18:27:41.959511 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acce1e1a-bb46-44c9-8383-3c6dac703a36-catalog-content\") pod \"community-operators-c29tj\" (UID: \"acce1e1a-bb46-44c9-8383-3c6dac703a36\") " pod="openshift-marketplace/community-operators-c29tj"
Dec 04 18:27:41 crc kubenswrapper[4631]: I1204 18:27:41.959921 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acce1e1a-bb46-44c9-8383-3c6dac703a36-utilities\") pod \"community-operators-c29tj\" (UID: \"acce1e1a-bb46-44c9-8383-3c6dac703a36\") " pod="openshift-marketplace/community-operators-c29tj"
Dec 04 18:27:41 crc kubenswrapper[4631]: I1204 18:27:41.960080 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acce1e1a-bb46-44c9-8383-3c6dac703a36-catalog-content\") pod \"community-operators-c29tj\" (UID: \"acce1e1a-bb46-44c9-8383-3c6dac703a36\") " pod="openshift-marketplace/community-operators-c29tj"
Dec 04 18:27:41 crc kubenswrapper[4631]: I1204 18:27:41.998316 4631 operation_generator.go:637]
"MountVolume.SetUp succeeded for volume \"kube-api-access-xzxp7\" (UniqueName: \"kubernetes.io/projected/acce1e1a-bb46-44c9-8383-3c6dac703a36-kube-api-access-xzxp7\") pod \"community-operators-c29tj\" (UID: \"acce1e1a-bb46-44c9-8383-3c6dac703a36\") " pod="openshift-marketplace/community-operators-c29tj" Dec 04 18:27:42 crc kubenswrapper[4631]: I1204 18:27:42.132335 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c29tj" Dec 04 18:27:42 crc kubenswrapper[4631]: I1204 18:27:42.704426 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c29tj"] Dec 04 18:27:43 crc kubenswrapper[4631]: I1204 18:27:43.426319 4631 generic.go:334] "Generic (PLEG): container finished" podID="acce1e1a-bb46-44c9-8383-3c6dac703a36" containerID="3763e9c24451895a19cab2519fed1da8d140514819844566831ca84ec3b0a27c" exitCode=0 Dec 04 18:27:43 crc kubenswrapper[4631]: I1204 18:27:43.426855 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c29tj" event={"ID":"acce1e1a-bb46-44c9-8383-3c6dac703a36","Type":"ContainerDied","Data":"3763e9c24451895a19cab2519fed1da8d140514819844566831ca84ec3b0a27c"} Dec 04 18:27:43 crc kubenswrapper[4631]: I1204 18:27:43.426977 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c29tj" event={"ID":"acce1e1a-bb46-44c9-8383-3c6dac703a36","Type":"ContainerStarted","Data":"c5735fdbafb3d716cdd419daff814fbac834ead35b7a5d19fd4fb17934731356"} Dec 04 18:27:44 crc kubenswrapper[4631]: I1204 18:27:44.435838 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c29tj" event={"ID":"acce1e1a-bb46-44c9-8383-3c6dac703a36","Type":"ContainerStarted","Data":"25c49459c2e28fd753f31f5404112a45f454ab7242d3b4e25334adb1619825cd"} Dec 04 18:27:46 crc kubenswrapper[4631]: I1204 18:27:46.457009 4631 generic.go:334] "Generic (PLEG): container finished" podID="acce1e1a-bb46-44c9-8383-3c6dac703a36" containerID="25c49459c2e28fd753f31f5404112a45f454ab7242d3b4e25334adb1619825cd" exitCode=0 Dec 04 18:27:46 crc kubenswrapper[4631]: I1204 18:27:46.457511 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c29tj" event={"ID":"acce1e1a-bb46-44c9-8383-3c6dac703a36","Type":"ContainerDied","Data":"25c49459c2e28fd753f31f5404112a45f454ab7242d3b4e25334adb1619825cd"} Dec 04 18:27:47 crc kubenswrapper[4631]: I1204 18:27:47.469916 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c29tj" event={"ID":"acce1e1a-bb46-44c9-8383-3c6dac703a36","Type":"ContainerStarted","Data":"c546ddb979e6fc61cd33da1f99d00ebf5b95098939a4480857c218fc58adf149"} Dec 04 18:27:47 crc kubenswrapper[4631]: I1204 18:27:47.498336 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-c29tj" podStartSLOduration=3.04234721 podStartE2EDuration="6.498317258s" podCreationTimestamp="2025-12-04 18:27:41 +0000 UTC" firstStartedPulling="2025-12-04 18:27:43.428848155 +0000 UTC m=+3593.461090153" lastFinishedPulling="2025-12-04 18:27:46.884818203 +0000 UTC m=+3596.917060201" observedRunningTime="2025-12-04 18:27:47.488754495 +0000 UTC m=+3597.520996493" watchObservedRunningTime="2025-12-04 18:27:47.498317258 +0000 UTC m=+3597.530559256" Dec 04 18:27:52 crc kubenswrapper[4631]: I1204 18:27:52.133567 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-c29tj" Dec 04 18:27:52 crc kubenswrapper[4631]: I1204 18:27:52.134150 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-c29tj" Dec 04 18:27:52 crc kubenswrapper[4631]: I1204 18:27:52.180740 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-c29tj" Dec 04 18:27:52 crc kubenswrapper[4631]: I1204 18:27:52.564039 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-c29tj" Dec 04 18:27:52 crc kubenswrapper[4631]: I1204 18:27:52.615202 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c29tj"] Dec 04 18:27:54 crc kubenswrapper[4631]: I1204 18:27:54.528059 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-c29tj" podUID="acce1e1a-bb46-44c9-8383-3c6dac703a36" containerName="registry-server" containerID="cri-o://c546ddb979e6fc61cd33da1f99d00ebf5b95098939a4480857c218fc58adf149" gracePeriod=2 Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.202240 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c29tj" Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.232450 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acce1e1a-bb46-44c9-8383-3c6dac703a36-utilities\") pod \"acce1e1a-bb46-44c9-8383-3c6dac703a36\" (UID: \"acce1e1a-bb46-44c9-8383-3c6dac703a36\") " Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.232615 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acce1e1a-bb46-44c9-8383-3c6dac703a36-catalog-content\") pod \"acce1e1a-bb46-44c9-8383-3c6dac703a36\" (UID: \"acce1e1a-bb46-44c9-8383-3c6dac703a36\") " Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.232722 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzxp7\" (UniqueName: \"kubernetes.io/projected/acce1e1a-bb46-44c9-8383-3c6dac703a36-kube-api-access-xzxp7\") pod \"acce1e1a-bb46-44c9-8383-3c6dac703a36\" (UID: \"acce1e1a-bb46-44c9-8383-3c6dac703a36\") " Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.233502 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/acce1e1a-bb46-44c9-8383-3c6dac703a36-utilities" (OuterVolumeSpecName: "utilities") pod "acce1e1a-bb46-44c9-8383-3c6dac703a36" (UID: "acce1e1a-bb46-44c9-8383-3c6dac703a36"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.277355 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acce1e1a-bb46-44c9-8383-3c6dac703a36-kube-api-access-xzxp7" (OuterVolumeSpecName: "kube-api-access-xzxp7") pod "acce1e1a-bb46-44c9-8383-3c6dac703a36" (UID: "acce1e1a-bb46-44c9-8383-3c6dac703a36"). InnerVolumeSpecName "kube-api-access-xzxp7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.302244 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/acce1e1a-bb46-44c9-8383-3c6dac703a36-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "acce1e1a-bb46-44c9-8383-3c6dac703a36" (UID: "acce1e1a-bb46-44c9-8383-3c6dac703a36"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.335092 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/acce1e1a-bb46-44c9-8383-3c6dac703a36-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.335130 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzxp7\" (UniqueName: \"kubernetes.io/projected/acce1e1a-bb46-44c9-8383-3c6dac703a36-kube-api-access-xzxp7\") on node \"crc\" DevicePath \"\"" Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.335146 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/acce1e1a-bb46-44c9-8383-3c6dac703a36-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.538947 4631 generic.go:334] "Generic (PLEG): container finished" podID="acce1e1a-bb46-44c9-8383-3c6dac703a36" containerID="c546ddb979e6fc61cd33da1f99d00ebf5b95098939a4480857c218fc58adf149" exitCode=0 Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.538998 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c29tj" event={"ID":"acce1e1a-bb46-44c9-8383-3c6dac703a36","Type":"ContainerDied","Data":"c546ddb979e6fc61cd33da1f99d00ebf5b95098939a4480857c218fc58adf149"} Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.539038 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c29tj" event={"ID":"acce1e1a-bb46-44c9-8383-3c6dac703a36","Type":"ContainerDied","Data":"c5735fdbafb3d716cdd419daff814fbac834ead35b7a5d19fd4fb17934731356"} Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.539058 4631 scope.go:117] "RemoveContainer" containerID="c546ddb979e6fc61cd33da1f99d00ebf5b95098939a4480857c218fc58adf149" Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.540206 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-c29tj" Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.569229 4631 scope.go:117] "RemoveContainer" containerID="25c49459c2e28fd753f31f5404112a45f454ab7242d3b4e25334adb1619825cd" Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.575917 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c29tj"] Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.584170 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-c29tj"] Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.614008 4631 scope.go:117] "RemoveContainer" containerID="3763e9c24451895a19cab2519fed1da8d140514819844566831ca84ec3b0a27c" Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.690221 4631 scope.go:117] "RemoveContainer" containerID="c546ddb979e6fc61cd33da1f99d00ebf5b95098939a4480857c218fc58adf149" Dec 04 18:27:55 crc kubenswrapper[4631]: E1204 18:27:55.690686 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c546ddb979e6fc61cd33da1f99d00ebf5b95098939a4480857c218fc58adf149\": container with ID starting with c546ddb979e6fc61cd33da1f99d00ebf5b95098939a4480857c218fc58adf149 not found: ID does not exist" containerID="c546ddb979e6fc61cd33da1f99d00ebf5b95098939a4480857c218fc58adf149" Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.690718 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c546ddb979e6fc61cd33da1f99d00ebf5b95098939a4480857c218fc58adf149"} err="failed to get container status \"c546ddb979e6fc61cd33da1f99d00ebf5b95098939a4480857c218fc58adf149\": rpc error: code = NotFound desc = could not find container \"c546ddb979e6fc61cd33da1f99d00ebf5b95098939a4480857c218fc58adf149\": container with ID starting with c546ddb979e6fc61cd33da1f99d00ebf5b95098939a4480857c218fc58adf149 not found: ID does not exist" Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.690738 4631 scope.go:117] "RemoveContainer" containerID="25c49459c2e28fd753f31f5404112a45f454ab7242d3b4e25334adb1619825cd" Dec 04 18:27:55 crc kubenswrapper[4631]: E1204 18:27:55.691091 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25c49459c2e28fd753f31f5404112a45f454ab7242d3b4e25334adb1619825cd\": container with ID starting with 25c49459c2e28fd753f31f5404112a45f454ab7242d3b4e25334adb1619825cd not found: ID does not exist" containerID="25c49459c2e28fd753f31f5404112a45f454ab7242d3b4e25334adb1619825cd" Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.691205 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25c49459c2e28fd753f31f5404112a45f454ab7242d3b4e25334adb1619825cd"} err="failed to get container status \"25c49459c2e28fd753f31f5404112a45f454ab7242d3b4e25334adb1619825cd\": rpc error: code = NotFound desc = could not find container \"25c49459c2e28fd753f31f5404112a45f454ab7242d3b4e25334adb1619825cd\": container with ID starting with 25c49459c2e28fd753f31f5404112a45f454ab7242d3b4e25334adb1619825cd not found: ID does not exist" Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.691294 4631 scope.go:117] "RemoveContainer" containerID="3763e9c24451895a19cab2519fed1da8d140514819844566831ca84ec3b0a27c" Dec 04 18:27:55 crc kubenswrapper[4631]: E1204 18:27:55.691648 4631 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"3763e9c24451895a19cab2519fed1da8d140514819844566831ca84ec3b0a27c\": container with ID starting with 3763e9c24451895a19cab2519fed1da8d140514819844566831ca84ec3b0a27c not found: ID does not exist" containerID="3763e9c24451895a19cab2519fed1da8d140514819844566831ca84ec3b0a27c" Dec 04 18:27:55 crc kubenswrapper[4631]: I1204 18:27:55.691669 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3763e9c24451895a19cab2519fed1da8d140514819844566831ca84ec3b0a27c"} err="failed to get container status \"3763e9c24451895a19cab2519fed1da8d140514819844566831ca84ec3b0a27c\": rpc error: code = NotFound desc = could not find container \"3763e9c24451895a19cab2519fed1da8d140514819844566831ca84ec3b0a27c\": container with ID starting with 3763e9c24451895a19cab2519fed1da8d140514819844566831ca84ec3b0a27c not found: ID does not exist" Dec 04 18:27:56 crc kubenswrapper[4631]: I1204 18:27:56.251014 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acce1e1a-bb46-44c9-8383-3c6dac703a36" path="/var/lib/kubelet/pods/acce1e1a-bb46-44c9-8383-3c6dac703a36/volumes" Dec 04 18:28:06 crc kubenswrapper[4631]: I1204 18:28:06.023049 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:28:06 crc kubenswrapper[4631]: I1204 18:28:06.023585 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 18:28:36 crc kubenswrapper[4631]: I1204 18:28:36.023247 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:28:36 crc kubenswrapper[4631]: I1204 18:28:36.024140 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 18:28:36 crc kubenswrapper[4631]: I1204 18:28:36.024212 4631 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 18:28:36 crc kubenswrapper[4631]: I1204 18:28:36.025550 4631 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa"} pod="openshift-machine-config-operator/machine-config-daemon-q27wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 18:28:36 crc kubenswrapper[4631]: I1204 18:28:36.025674 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" 
podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" containerID="cri-o://ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" gracePeriod=600 Dec 04 18:28:36 crc kubenswrapper[4631]: E1204 18:28:36.159896 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:28:36 crc kubenswrapper[4631]: I1204 18:28:36.926665 4631 generic.go:334] "Generic (PLEG): container finished" podID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" exitCode=0 Dec 04 18:28:36 crc kubenswrapper[4631]: I1204 18:28:36.926718 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerDied","Data":"ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa"} Dec 04 18:28:36 crc kubenswrapper[4631]: I1204 18:28:36.926756 4631 scope.go:117] "RemoveContainer" containerID="f77a279325343725c665491a0a3f0ab9d510d4cf2851fa6c24cd73fd7d33dbfe" Dec 04 18:28:36 crc kubenswrapper[4631]: I1204 18:28:36.928519 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:28:36 crc kubenswrapper[4631]: E1204 18:28:36.929569 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:28:51 crc kubenswrapper[4631]: I1204 18:28:51.239797 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:28:51 crc kubenswrapper[4631]: E1204 18:28:51.240518 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:29:06 crc kubenswrapper[4631]: I1204 18:29:06.239263 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:29:06 crc kubenswrapper[4631]: E1204 18:29:06.240249 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:29:17 crc kubenswrapper[4631]: I1204 18:29:17.239905 4631 scope.go:117] 
"RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:29:17 crc kubenswrapper[4631]: E1204 18:29:17.241323 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:29:31 crc kubenswrapper[4631]: I1204 18:29:31.240446 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:29:31 crc kubenswrapper[4631]: E1204 18:29:31.241294 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:29:46 crc kubenswrapper[4631]: I1204 18:29:46.240049 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:29:46 crc kubenswrapper[4631]: E1204 18:29:46.240890 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:30:00 crc kubenswrapper[4631]: I1204 18:30:00.159791 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414550-l2pcj"] Dec 04 18:30:00 crc kubenswrapper[4631]: E1204 18:30:00.160671 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acce1e1a-bb46-44c9-8383-3c6dac703a36" containerName="registry-server" Dec 04 18:30:00 crc kubenswrapper[4631]: I1204 18:30:00.160689 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="acce1e1a-bb46-44c9-8383-3c6dac703a36" containerName="registry-server" Dec 04 18:30:00 crc kubenswrapper[4631]: E1204 18:30:00.160712 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acce1e1a-bb46-44c9-8383-3c6dac703a36" containerName="extract-utilities" Dec 04 18:30:00 crc kubenswrapper[4631]: I1204 18:30:00.160721 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="acce1e1a-bb46-44c9-8383-3c6dac703a36" containerName="extract-utilities" Dec 04 18:30:00 crc kubenswrapper[4631]: E1204 18:30:00.160742 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acce1e1a-bb46-44c9-8383-3c6dac703a36" containerName="extract-content" Dec 04 18:30:00 crc kubenswrapper[4631]: I1204 18:30:00.160753 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="acce1e1a-bb46-44c9-8383-3c6dac703a36" containerName="extract-content" Dec 04 18:30:00 crc kubenswrapper[4631]: I1204 18:30:00.160962 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="acce1e1a-bb46-44c9-8383-3c6dac703a36" containerName="registry-server" Dec 04 18:30:00 crc 
kubenswrapper[4631]: I1204 18:30:00.161590 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414550-l2pcj" Dec 04 18:30:00 crc kubenswrapper[4631]: I1204 18:30:00.168694 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 04 18:30:00 crc kubenswrapper[4631]: I1204 18:30:00.209484 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 04 18:30:00 crc kubenswrapper[4631]: I1204 18:30:00.251969 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:30:00 crc kubenswrapper[4631]: E1204 18:30:00.252883 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:30:00 crc kubenswrapper[4631]: I1204 18:30:00.264873 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414550-l2pcj"] Dec 04 18:30:00 crc kubenswrapper[4631]: I1204 18:30:00.322194 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8a182546-60b0-4997-bf96-acccd9aa715f-secret-volume\") pod \"collect-profiles-29414550-l2pcj\" (UID: \"8a182546-60b0-4997-bf96-acccd9aa715f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414550-l2pcj" Dec 04 18:30:00 crc kubenswrapper[4631]: I1204 18:30:00.322250 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8a182546-60b0-4997-bf96-acccd9aa715f-config-volume\") pod \"collect-profiles-29414550-l2pcj\" (UID: \"8a182546-60b0-4997-bf96-acccd9aa715f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414550-l2pcj" Dec 04 18:30:00 crc kubenswrapper[4631]: I1204 18:30:00.322309 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjvgc\" (UniqueName: \"kubernetes.io/projected/8a182546-60b0-4997-bf96-acccd9aa715f-kube-api-access-mjvgc\") pod \"collect-profiles-29414550-l2pcj\" (UID: \"8a182546-60b0-4997-bf96-acccd9aa715f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414550-l2pcj" Dec 04 18:30:00 crc kubenswrapper[4631]: I1204 18:30:00.424076 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8a182546-60b0-4997-bf96-acccd9aa715f-secret-volume\") pod \"collect-profiles-29414550-l2pcj\" (UID: \"8a182546-60b0-4997-bf96-acccd9aa715f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414550-l2pcj" Dec 04 18:30:00 crc kubenswrapper[4631]: I1204 18:30:00.424147 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8a182546-60b0-4997-bf96-acccd9aa715f-config-volume\") pod \"collect-profiles-29414550-l2pcj\" (UID: \"8a182546-60b0-4997-bf96-acccd9aa715f\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29414550-l2pcj" Dec 04 18:30:00 crc kubenswrapper[4631]: I1204 18:30:00.424190 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjvgc\" (UniqueName: \"kubernetes.io/projected/8a182546-60b0-4997-bf96-acccd9aa715f-kube-api-access-mjvgc\") pod \"collect-profiles-29414550-l2pcj\" (UID: \"8a182546-60b0-4997-bf96-acccd9aa715f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414550-l2pcj" Dec 04 18:30:00 crc kubenswrapper[4631]: I1204 18:30:00.427129 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8a182546-60b0-4997-bf96-acccd9aa715f-config-volume\") pod \"collect-profiles-29414550-l2pcj\" (UID: \"8a182546-60b0-4997-bf96-acccd9aa715f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414550-l2pcj" Dec 04 18:30:00 crc kubenswrapper[4631]: I1204 18:30:00.444623 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8a182546-60b0-4997-bf96-acccd9aa715f-secret-volume\") pod \"collect-profiles-29414550-l2pcj\" (UID: \"8a182546-60b0-4997-bf96-acccd9aa715f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414550-l2pcj" Dec 04 18:30:00 crc kubenswrapper[4631]: I1204 18:30:00.449400 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjvgc\" (UniqueName: \"kubernetes.io/projected/8a182546-60b0-4997-bf96-acccd9aa715f-kube-api-access-mjvgc\") pod \"collect-profiles-29414550-l2pcj\" (UID: \"8a182546-60b0-4997-bf96-acccd9aa715f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414550-l2pcj" Dec 04 18:30:00 crc kubenswrapper[4631]: I1204 18:30:00.486316 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414550-l2pcj" Dec 04 18:30:00 crc kubenswrapper[4631]: I1204 18:30:00.961058 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414550-l2pcj"] Dec 04 18:30:01 crc kubenswrapper[4631]: I1204 18:30:01.726977 4631 generic.go:334] "Generic (PLEG): container finished" podID="8a182546-60b0-4997-bf96-acccd9aa715f" containerID="9f6238c7dda0ba8b306b05ccc496218be0b402eb328a9ebcd09c0abfa4c31326" exitCode=0 Dec 04 18:30:01 crc kubenswrapper[4631]: I1204 18:30:01.727145 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414550-l2pcj" event={"ID":"8a182546-60b0-4997-bf96-acccd9aa715f","Type":"ContainerDied","Data":"9f6238c7dda0ba8b306b05ccc496218be0b402eb328a9ebcd09c0abfa4c31326"} Dec 04 18:30:01 crc kubenswrapper[4631]: I1204 18:30:01.727257 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414550-l2pcj" event={"ID":"8a182546-60b0-4997-bf96-acccd9aa715f","Type":"ContainerStarted","Data":"d356b5068db8d50a033f86696201ecfa11fb6d7369386a25c9dee2b4028b8aa7"} Dec 04 18:30:03 crc kubenswrapper[4631]: I1204 18:30:03.299990 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414550-l2pcj" Dec 04 18:30:03 crc kubenswrapper[4631]: I1204 18:30:03.380985 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8a182546-60b0-4997-bf96-acccd9aa715f-config-volume\") pod \"8a182546-60b0-4997-bf96-acccd9aa715f\" (UID: \"8a182546-60b0-4997-bf96-acccd9aa715f\") " Dec 04 18:30:03 crc kubenswrapper[4631]: I1204 18:30:03.381066 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mjvgc\" (UniqueName: \"kubernetes.io/projected/8a182546-60b0-4997-bf96-acccd9aa715f-kube-api-access-mjvgc\") pod \"8a182546-60b0-4997-bf96-acccd9aa715f\" (UID: \"8a182546-60b0-4997-bf96-acccd9aa715f\") " Dec 04 18:30:03 crc kubenswrapper[4631]: I1204 18:30:03.381097 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8a182546-60b0-4997-bf96-acccd9aa715f-secret-volume\") pod \"8a182546-60b0-4997-bf96-acccd9aa715f\" (UID: \"8a182546-60b0-4997-bf96-acccd9aa715f\") " Dec 04 18:30:03 crc kubenswrapper[4631]: I1204 18:30:03.383048 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a182546-60b0-4997-bf96-acccd9aa715f-config-volume" (OuterVolumeSpecName: "config-volume") pod "8a182546-60b0-4997-bf96-acccd9aa715f" (UID: "8a182546-60b0-4997-bf96-acccd9aa715f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 18:30:03 crc kubenswrapper[4631]: I1204 18:30:03.389948 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a182546-60b0-4997-bf96-acccd9aa715f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8a182546-60b0-4997-bf96-acccd9aa715f" (UID: "8a182546-60b0-4997-bf96-acccd9aa715f"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:30:03 crc kubenswrapper[4631]: I1204 18:30:03.390865 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a182546-60b0-4997-bf96-acccd9aa715f-kube-api-access-mjvgc" (OuterVolumeSpecName: "kube-api-access-mjvgc") pod "8a182546-60b0-4997-bf96-acccd9aa715f" (UID: "8a182546-60b0-4997-bf96-acccd9aa715f"). InnerVolumeSpecName "kube-api-access-mjvgc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:30:03 crc kubenswrapper[4631]: I1204 18:30:03.483166 4631 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8a182546-60b0-4997-bf96-acccd9aa715f-config-volume\") on node \"crc\" DevicePath \"\"" Dec 04 18:30:03 crc kubenswrapper[4631]: I1204 18:30:03.483203 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mjvgc\" (UniqueName: \"kubernetes.io/projected/8a182546-60b0-4997-bf96-acccd9aa715f-kube-api-access-mjvgc\") on node \"crc\" DevicePath \"\"" Dec 04 18:30:03 crc kubenswrapper[4631]: I1204 18:30:03.483215 4631 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8a182546-60b0-4997-bf96-acccd9aa715f-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 04 18:30:03 crc kubenswrapper[4631]: I1204 18:30:03.743253 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414550-l2pcj" event={"ID":"8a182546-60b0-4997-bf96-acccd9aa715f","Type":"ContainerDied","Data":"d356b5068db8d50a033f86696201ecfa11fb6d7369386a25c9dee2b4028b8aa7"} Dec 04 18:30:03 crc kubenswrapper[4631]: I1204 18:30:03.743296 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d356b5068db8d50a033f86696201ecfa11fb6d7369386a25c9dee2b4028b8aa7" Dec 04 18:30:03 crc kubenswrapper[4631]: I1204 18:30:03.743366 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414550-l2pcj" Dec 04 18:30:04 crc kubenswrapper[4631]: I1204 18:30:04.386526 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414505-mm4zf"] Dec 04 18:30:04 crc kubenswrapper[4631]: I1204 18:30:04.396954 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414505-mm4zf"] Dec 04 18:30:06 crc kubenswrapper[4631]: I1204 18:30:06.254738 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2428162f-1bea-4dc0-8a79-37c806367a59" path="/var/lib/kubelet/pods/2428162f-1bea-4dc0-8a79-37c806367a59/volumes" Dec 04 18:30:13 crc kubenswrapper[4631]: I1204 18:30:13.240073 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:30:13 crc kubenswrapper[4631]: E1204 18:30:13.240939 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:30:23 crc kubenswrapper[4631]: I1204 18:30:23.976860 4631 scope.go:117] "RemoveContainer" containerID="8edff1bd06f718da48a64d6b525bdd0775daed1fcb764afaceb6a37c03d0f9e2" Dec 04 18:30:28 crc kubenswrapper[4631]: I1204 18:30:28.240296 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:30:28 crc kubenswrapper[4631]: E1204 18:30:28.241262 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:30:40 crc kubenswrapper[4631]: I1204 18:30:40.245109 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:30:40 crc kubenswrapper[4631]: E1204 18:30:40.245766 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:30:55 crc kubenswrapper[4631]: I1204 18:30:55.239069 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:30:55 crc kubenswrapper[4631]: E1204 18:30:55.240113 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:31:09 crc kubenswrapper[4631]: I1204 18:31:09.239900 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:31:09 crc kubenswrapper[4631]: E1204 18:31:09.240681 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:31:23 crc kubenswrapper[4631]: I1204 18:31:23.239538 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:31:23 crc kubenswrapper[4631]: E1204 18:31:23.240269 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:31:38 crc kubenswrapper[4631]: I1204 18:31:38.244079 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:31:38 crc kubenswrapper[4631]: E1204 18:31:38.246867 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:31:51 crc kubenswrapper[4631]: I1204 18:31:51.239219 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:31:51 crc kubenswrapper[4631]: E1204 18:31:51.240008 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:32:06 crc kubenswrapper[4631]: I1204 18:32:06.243360 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:32:06 crc kubenswrapper[4631]: E1204 18:32:06.244118 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:32:20 crc kubenswrapper[4631]: I1204 18:32:20.245746 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:32:20 crc kubenswrapper[4631]: E1204 18:32:20.247614 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:32:31 crc kubenswrapper[4631]: I1204 18:32:31.240260 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:32:31 crc kubenswrapper[4631]: E1204 18:32:31.241547 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:32:46 crc kubenswrapper[4631]: I1204 18:32:46.239386 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:32:46 crc kubenswrapper[4631]: E1204 18:32:46.240190 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:32:58 crc kubenswrapper[4631]: I1204 18:32:58.239614 4631 
scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:32:58 crc kubenswrapper[4631]: E1204 18:32:58.240444 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:33:13 crc kubenswrapper[4631]: I1204 18:33:13.239283 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:33:13 crc kubenswrapper[4631]: E1204 18:33:13.240105 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:33:24 crc kubenswrapper[4631]: I1204 18:33:24.239936 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:33:24 crc kubenswrapper[4631]: E1204 18:33:24.240684 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:33:35 crc kubenswrapper[4631]: I1204 18:33:35.239292 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:33:35 crc kubenswrapper[4631]: E1204 18:33:35.240195 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:33:49 crc kubenswrapper[4631]: I1204 18:33:49.239546 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa" Dec 04 18:33:49 crc kubenswrapper[4631]: I1204 18:33:49.965486 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerStarted","Data":"a08abbadd1666d41c778d93c9168f1cedea0ac0fd4eecfadace71ecc85537949"} Dec 04 18:34:09 crc kubenswrapper[4631]: I1204 18:34:09.375805 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-s2gkx"] Dec 04 18:34:09 crc kubenswrapper[4631]: E1204 18:34:09.376864 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a182546-60b0-4997-bf96-acccd9aa715f" containerName="collect-profiles" Dec 04 18:34:09 crc kubenswrapper[4631]: 
I1204 18:34:09.376885 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a182546-60b0-4997-bf96-acccd9aa715f" containerName="collect-profiles"
Dec 04 18:34:09 crc kubenswrapper[4631]: I1204 18:34:09.377156 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a182546-60b0-4997-bf96-acccd9aa715f" containerName="collect-profiles"
Dec 04 18:34:09 crc kubenswrapper[4631]: I1204 18:34:09.382013 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s2gkx"
Dec 04 18:34:09 crc kubenswrapper[4631]: I1204 18:34:09.395558 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s2gkx"]
Dec 04 18:34:09 crc kubenswrapper[4631]: I1204 18:34:09.453769 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89ad0a1b-f6c5-4488-bd11-ad675a56c288-utilities\") pod \"certified-operators-s2gkx\" (UID: \"89ad0a1b-f6c5-4488-bd11-ad675a56c288\") " pod="openshift-marketplace/certified-operators-s2gkx"
Dec 04 18:34:09 crc kubenswrapper[4631]: I1204 18:34:09.454015 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89ad0a1b-f6c5-4488-bd11-ad675a56c288-catalog-content\") pod \"certified-operators-s2gkx\" (UID: \"89ad0a1b-f6c5-4488-bd11-ad675a56c288\") " pod="openshift-marketplace/certified-operators-s2gkx"
Dec 04 18:34:09 crc kubenswrapper[4631]: I1204 18:34:09.454151 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmzdn\" (UniqueName: \"kubernetes.io/projected/89ad0a1b-f6c5-4488-bd11-ad675a56c288-kube-api-access-tmzdn\") pod \"certified-operators-s2gkx\" (UID: \"89ad0a1b-f6c5-4488-bd11-ad675a56c288\") " pod="openshift-marketplace/certified-operators-s2gkx"
Dec 04 18:34:09 crc kubenswrapper[4631]: I1204 18:34:09.555813 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmzdn\" (UniqueName: \"kubernetes.io/projected/89ad0a1b-f6c5-4488-bd11-ad675a56c288-kube-api-access-tmzdn\") pod \"certified-operators-s2gkx\" (UID: \"89ad0a1b-f6c5-4488-bd11-ad675a56c288\") " pod="openshift-marketplace/certified-operators-s2gkx"
Dec 04 18:34:09 crc kubenswrapper[4631]: I1204 18:34:09.556007 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89ad0a1b-f6c5-4488-bd11-ad675a56c288-utilities\") pod \"certified-operators-s2gkx\" (UID: \"89ad0a1b-f6c5-4488-bd11-ad675a56c288\") " pod="openshift-marketplace/certified-operators-s2gkx"
Dec 04 18:34:09 crc kubenswrapper[4631]: I1204 18:34:09.556032 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89ad0a1b-f6c5-4488-bd11-ad675a56c288-catalog-content\") pod \"certified-operators-s2gkx\" (UID: \"89ad0a1b-f6c5-4488-bd11-ad675a56c288\") " pod="openshift-marketplace/certified-operators-s2gkx"
Dec 04 18:34:09 crc kubenswrapper[4631]: I1204 18:34:09.556800 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89ad0a1b-f6c5-4488-bd11-ad675a56c288-catalog-content\") pod \"certified-operators-s2gkx\" (UID: \"89ad0a1b-f6c5-4488-bd11-ad675a56c288\") " pod="openshift-marketplace/certified-operators-s2gkx"
Dec 04 18:34:09 crc kubenswrapper[4631]: I1204 18:34:09.556814 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89ad0a1b-f6c5-4488-bd11-ad675a56c288-utilities\") pod \"certified-operators-s2gkx\" (UID: \"89ad0a1b-f6c5-4488-bd11-ad675a56c288\") " pod="openshift-marketplace/certified-operators-s2gkx"
Dec 04 18:34:09 crc kubenswrapper[4631]: I1204 18:34:09.578973 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmzdn\" (UniqueName: \"kubernetes.io/projected/89ad0a1b-f6c5-4488-bd11-ad675a56c288-kube-api-access-tmzdn\") pod \"certified-operators-s2gkx\" (UID: \"89ad0a1b-f6c5-4488-bd11-ad675a56c288\") " pod="openshift-marketplace/certified-operators-s2gkx"
Dec 04 18:34:09 crc kubenswrapper[4631]: I1204 18:34:09.700062 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s2gkx"
Dec 04 18:34:10 crc kubenswrapper[4631]: I1204 18:34:10.384120 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s2gkx"]
Dec 04 18:34:10 crc kubenswrapper[4631]: W1204 18:34:10.391920 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod89ad0a1b_f6c5_4488_bd11_ad675a56c288.slice/crio-836fda2999ce13e7eb2a9e2268869aed8517f99129c8e4ac0ce98860a0ccd9f6 WatchSource:0}: Error finding container 836fda2999ce13e7eb2a9e2268869aed8517f99129c8e4ac0ce98860a0ccd9f6: Status 404 returned error can't find the container with id 836fda2999ce13e7eb2a9e2268869aed8517f99129c8e4ac0ce98860a0ccd9f6
Dec 04 18:34:11 crc kubenswrapper[4631]: I1204 18:34:11.194973 4631 generic.go:334] "Generic (PLEG): container finished" podID="89ad0a1b-f6c5-4488-bd11-ad675a56c288" containerID="30a73686c8ef5434bb298aea6f05eca4c0bf66408c79a1bb028850ca40b6b52c" exitCode=0
Dec 04 18:34:11 crc kubenswrapper[4631]: I1204 18:34:11.195553 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s2gkx" event={"ID":"89ad0a1b-f6c5-4488-bd11-ad675a56c288","Type":"ContainerDied","Data":"30a73686c8ef5434bb298aea6f05eca4c0bf66408c79a1bb028850ca40b6b52c"}
Dec 04 18:34:11 crc kubenswrapper[4631]: I1204 18:34:11.195598 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s2gkx" event={"ID":"89ad0a1b-f6c5-4488-bd11-ad675a56c288","Type":"ContainerStarted","Data":"836fda2999ce13e7eb2a9e2268869aed8517f99129c8e4ac0ce98860a0ccd9f6"}
Dec 04 18:34:11 crc kubenswrapper[4631]: I1204 18:34:11.197990 4631 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 04 18:34:12 crc kubenswrapper[4631]: I1204 18:34:12.205046 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s2gkx" event={"ID":"89ad0a1b-f6c5-4488-bd11-ad675a56c288","Type":"ContainerStarted","Data":"bbb3ae34f49fb4df8289f394838e2aafea14137f4fe28f197e259b7d6fa0ea51"}
Dec 04 18:34:13 crc kubenswrapper[4631]: I1204 18:34:13.214619 4631 generic.go:334] "Generic (PLEG): container finished" podID="89ad0a1b-f6c5-4488-bd11-ad675a56c288" containerID="bbb3ae34f49fb4df8289f394838e2aafea14137f4fe28f197e259b7d6fa0ea51" exitCode=0
Dec 04 18:34:13 crc kubenswrapper[4631]: I1204 18:34:13.214696 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s2gkx" event={"ID":"89ad0a1b-f6c5-4488-bd11-ad675a56c288","Type":"ContainerDied","Data":"bbb3ae34f49fb4df8289f394838e2aafea14137f4fe28f197e259b7d6fa0ea51"}
Dec 04 18:34:14 crc kubenswrapper[4631]: I1204 18:34:14.229717 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s2gkx" event={"ID":"89ad0a1b-f6c5-4488-bd11-ad675a56c288","Type":"ContainerStarted","Data":"7f878a35d3604cde729938632f496a01605c4b9fd377ff48b256c2282d69af76"}
Dec 04 18:34:14 crc kubenswrapper[4631]: I1204 18:34:14.251531 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-s2gkx" podStartSLOduration=2.739339911 podStartE2EDuration="5.251510355s" podCreationTimestamp="2025-12-04 18:34:09 +0000 UTC" firstStartedPulling="2025-12-04 18:34:11.197652396 +0000 UTC m=+3981.229894404" lastFinishedPulling="2025-12-04 18:34:13.70982283 +0000 UTC m=+3983.742064848" observedRunningTime="2025-12-04 18:34:14.246336099 +0000 UTC m=+3984.278578097" watchObservedRunningTime="2025-12-04 18:34:14.251510355 +0000 UTC m=+3984.283752353"
Dec 04 18:34:19 crc kubenswrapper[4631]: I1204 18:34:19.700912 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-s2gkx"
Dec 04 18:34:19 crc kubenswrapper[4631]: I1204 18:34:19.701587 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-s2gkx"
Dec 04 18:34:19 crc kubenswrapper[4631]: I1204 18:34:19.761485 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-s2gkx"
Dec 04 18:34:20 crc kubenswrapper[4631]: I1204 18:34:20.335856 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-s2gkx"
Dec 04 18:34:20 crc kubenswrapper[4631]: I1204 18:34:20.382982 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s2gkx"]
Dec 04 18:34:22 crc kubenswrapper[4631]: I1204 18:34:22.304015 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-s2gkx" podUID="89ad0a1b-f6c5-4488-bd11-ad675a56c288" containerName="registry-server" containerID="cri-o://7f878a35d3604cde729938632f496a01605c4b9fd377ff48b256c2282d69af76" gracePeriod=2
Dec 04 18:34:22 crc kubenswrapper[4631]: I1204 18:34:22.939934 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s2gkx"
Need to start a new one" pod="openshift-marketplace/certified-operators-s2gkx" Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.024008 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89ad0a1b-f6c5-4488-bd11-ad675a56c288-catalog-content\") pod \"89ad0a1b-f6c5-4488-bd11-ad675a56c288\" (UID: \"89ad0a1b-f6c5-4488-bd11-ad675a56c288\") " Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.029120 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tmzdn\" (UniqueName: \"kubernetes.io/projected/89ad0a1b-f6c5-4488-bd11-ad675a56c288-kube-api-access-tmzdn\") pod \"89ad0a1b-f6c5-4488-bd11-ad675a56c288\" (UID: \"89ad0a1b-f6c5-4488-bd11-ad675a56c288\") " Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.029290 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89ad0a1b-f6c5-4488-bd11-ad675a56c288-utilities\") pod \"89ad0a1b-f6c5-4488-bd11-ad675a56c288\" (UID: \"89ad0a1b-f6c5-4488-bd11-ad675a56c288\") " Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.029847 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89ad0a1b-f6c5-4488-bd11-ad675a56c288-utilities" (OuterVolumeSpecName: "utilities") pod "89ad0a1b-f6c5-4488-bd11-ad675a56c288" (UID: "89ad0a1b-f6c5-4488-bd11-ad675a56c288"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.030161 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89ad0a1b-f6c5-4488-bd11-ad675a56c288-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.035931 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89ad0a1b-f6c5-4488-bd11-ad675a56c288-kube-api-access-tmzdn" (OuterVolumeSpecName: "kube-api-access-tmzdn") pod "89ad0a1b-f6c5-4488-bd11-ad675a56c288" (UID: "89ad0a1b-f6c5-4488-bd11-ad675a56c288"). InnerVolumeSpecName "kube-api-access-tmzdn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.076628 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89ad0a1b-f6c5-4488-bd11-ad675a56c288-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "89ad0a1b-f6c5-4488-bd11-ad675a56c288" (UID: "89ad0a1b-f6c5-4488-bd11-ad675a56c288"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.131943 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89ad0a1b-f6c5-4488-bd11-ad675a56c288-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.131975 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tmzdn\" (UniqueName: \"kubernetes.io/projected/89ad0a1b-f6c5-4488-bd11-ad675a56c288-kube-api-access-tmzdn\") on node \"crc\" DevicePath \"\"" Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.314449 4631 generic.go:334] "Generic (PLEG): container finished" podID="89ad0a1b-f6c5-4488-bd11-ad675a56c288" containerID="7f878a35d3604cde729938632f496a01605c4b9fd377ff48b256c2282d69af76" exitCode=0 Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.314483 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s2gkx" event={"ID":"89ad0a1b-f6c5-4488-bd11-ad675a56c288","Type":"ContainerDied","Data":"7f878a35d3604cde729938632f496a01605c4b9fd377ff48b256c2282d69af76"} Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.314499 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s2gkx" Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.314509 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s2gkx" event={"ID":"89ad0a1b-f6c5-4488-bd11-ad675a56c288","Type":"ContainerDied","Data":"836fda2999ce13e7eb2a9e2268869aed8517f99129c8e4ac0ce98860a0ccd9f6"} Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.314526 4631 scope.go:117] "RemoveContainer" containerID="7f878a35d3604cde729938632f496a01605c4b9fd377ff48b256c2282d69af76" Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.341209 4631 scope.go:117] "RemoveContainer" containerID="bbb3ae34f49fb4df8289f394838e2aafea14137f4fe28f197e259b7d6fa0ea51" Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.352537 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s2gkx"] Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.362792 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-s2gkx"] Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.374593 4631 scope.go:117] "RemoveContainer" containerID="30a73686c8ef5434bb298aea6f05eca4c0bf66408c79a1bb028850ca40b6b52c" Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.420731 4631 scope.go:117] "RemoveContainer" containerID="7f878a35d3604cde729938632f496a01605c4b9fd377ff48b256c2282d69af76" Dec 04 18:34:23 crc kubenswrapper[4631]: E1204 18:34:23.421247 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f878a35d3604cde729938632f496a01605c4b9fd377ff48b256c2282d69af76\": container with ID starting with 7f878a35d3604cde729938632f496a01605c4b9fd377ff48b256c2282d69af76 not found: ID does not exist" containerID="7f878a35d3604cde729938632f496a01605c4b9fd377ff48b256c2282d69af76" Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.421357 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f878a35d3604cde729938632f496a01605c4b9fd377ff48b256c2282d69af76"} err="failed to get container status 
\"7f878a35d3604cde729938632f496a01605c4b9fd377ff48b256c2282d69af76\": rpc error: code = NotFound desc = could not find container \"7f878a35d3604cde729938632f496a01605c4b9fd377ff48b256c2282d69af76\": container with ID starting with 7f878a35d3604cde729938632f496a01605c4b9fd377ff48b256c2282d69af76 not found: ID does not exist" Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.421503 4631 scope.go:117] "RemoveContainer" containerID="bbb3ae34f49fb4df8289f394838e2aafea14137f4fe28f197e259b7d6fa0ea51" Dec 04 18:34:23 crc kubenswrapper[4631]: E1204 18:34:23.422136 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbb3ae34f49fb4df8289f394838e2aafea14137f4fe28f197e259b7d6fa0ea51\": container with ID starting with bbb3ae34f49fb4df8289f394838e2aafea14137f4fe28f197e259b7d6fa0ea51 not found: ID does not exist" containerID="bbb3ae34f49fb4df8289f394838e2aafea14137f4fe28f197e259b7d6fa0ea51" Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.422228 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbb3ae34f49fb4df8289f394838e2aafea14137f4fe28f197e259b7d6fa0ea51"} err="failed to get container status \"bbb3ae34f49fb4df8289f394838e2aafea14137f4fe28f197e259b7d6fa0ea51\": rpc error: code = NotFound desc = could not find container \"bbb3ae34f49fb4df8289f394838e2aafea14137f4fe28f197e259b7d6fa0ea51\": container with ID starting with bbb3ae34f49fb4df8289f394838e2aafea14137f4fe28f197e259b7d6fa0ea51 not found: ID does not exist" Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.422323 4631 scope.go:117] "RemoveContainer" containerID="30a73686c8ef5434bb298aea6f05eca4c0bf66408c79a1bb028850ca40b6b52c" Dec 04 18:34:23 crc kubenswrapper[4631]: E1204 18:34:23.423106 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"30a73686c8ef5434bb298aea6f05eca4c0bf66408c79a1bb028850ca40b6b52c\": container with ID starting with 30a73686c8ef5434bb298aea6f05eca4c0bf66408c79a1bb028850ca40b6b52c not found: ID does not exist" containerID="30a73686c8ef5434bb298aea6f05eca4c0bf66408c79a1bb028850ca40b6b52c" Dec 04 18:34:23 crc kubenswrapper[4631]: I1204 18:34:23.423165 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30a73686c8ef5434bb298aea6f05eca4c0bf66408c79a1bb028850ca40b6b52c"} err="failed to get container status \"30a73686c8ef5434bb298aea6f05eca4c0bf66408c79a1bb028850ca40b6b52c\": rpc error: code = NotFound desc = could not find container \"30a73686c8ef5434bb298aea6f05eca4c0bf66408c79a1bb028850ca40b6b52c\": container with ID starting with 30a73686c8ef5434bb298aea6f05eca4c0bf66408c79a1bb028850ca40b6b52c not found: ID does not exist" Dec 04 18:34:24 crc kubenswrapper[4631]: I1204 18:34:24.249062 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89ad0a1b-f6c5-4488-bd11-ad675a56c288" path="/var/lib/kubelet/pods/89ad0a1b-f6c5-4488-bd11-ad675a56c288/volumes" Dec 04 18:34:29 crc kubenswrapper[4631]: I1204 18:34:29.434625 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qq27m"] Dec 04 18:34:29 crc kubenswrapper[4631]: E1204 18:34:29.435572 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89ad0a1b-f6c5-4488-bd11-ad675a56c288" containerName="registry-server" Dec 04 18:34:29 crc kubenswrapper[4631]: I1204 18:34:29.435590 4631 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="89ad0a1b-f6c5-4488-bd11-ad675a56c288" containerName="registry-server" Dec 04 18:34:29 crc kubenswrapper[4631]: E1204 18:34:29.435608 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89ad0a1b-f6c5-4488-bd11-ad675a56c288" containerName="extract-content" Dec 04 18:34:29 crc kubenswrapper[4631]: I1204 18:34:29.435616 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="89ad0a1b-f6c5-4488-bd11-ad675a56c288" containerName="extract-content" Dec 04 18:34:29 crc kubenswrapper[4631]: E1204 18:34:29.435649 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89ad0a1b-f6c5-4488-bd11-ad675a56c288" containerName="extract-utilities" Dec 04 18:34:29 crc kubenswrapper[4631]: I1204 18:34:29.435657 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="89ad0a1b-f6c5-4488-bd11-ad675a56c288" containerName="extract-utilities" Dec 04 18:34:29 crc kubenswrapper[4631]: I1204 18:34:29.435862 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="89ad0a1b-f6c5-4488-bd11-ad675a56c288" containerName="registry-server" Dec 04 18:34:29 crc kubenswrapper[4631]: I1204 18:34:29.437215 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qq27m" Dec 04 18:34:29 crc kubenswrapper[4631]: I1204 18:34:29.449102 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qq27m"] Dec 04 18:34:29 crc kubenswrapper[4631]: I1204 18:34:29.555524 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l69p4\" (UniqueName: \"kubernetes.io/projected/a3e44051-4743-4e6c-b60a-ab474e4cf74e-kube-api-access-l69p4\") pod \"redhat-operators-qq27m\" (UID: \"a3e44051-4743-4e6c-b60a-ab474e4cf74e\") " pod="openshift-marketplace/redhat-operators-qq27m" Dec 04 18:34:29 crc kubenswrapper[4631]: I1204 18:34:29.555616 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3e44051-4743-4e6c-b60a-ab474e4cf74e-catalog-content\") pod \"redhat-operators-qq27m\" (UID: \"a3e44051-4743-4e6c-b60a-ab474e4cf74e\") " pod="openshift-marketplace/redhat-operators-qq27m" Dec 04 18:34:29 crc kubenswrapper[4631]: I1204 18:34:29.555691 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3e44051-4743-4e6c-b60a-ab474e4cf74e-utilities\") pod \"redhat-operators-qq27m\" (UID: \"a3e44051-4743-4e6c-b60a-ab474e4cf74e\") " pod="openshift-marketplace/redhat-operators-qq27m" Dec 04 18:34:29 crc kubenswrapper[4631]: I1204 18:34:29.657793 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l69p4\" (UniqueName: \"kubernetes.io/projected/a3e44051-4743-4e6c-b60a-ab474e4cf74e-kube-api-access-l69p4\") pod \"redhat-operators-qq27m\" (UID: \"a3e44051-4743-4e6c-b60a-ab474e4cf74e\") " pod="openshift-marketplace/redhat-operators-qq27m" Dec 04 18:34:29 crc kubenswrapper[4631]: I1204 18:34:29.658318 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3e44051-4743-4e6c-b60a-ab474e4cf74e-catalog-content\") pod \"redhat-operators-qq27m\" (UID: \"a3e44051-4743-4e6c-b60a-ab474e4cf74e\") " pod="openshift-marketplace/redhat-operators-qq27m" Dec 04 18:34:29 crc kubenswrapper[4631]: I1204 18:34:29.657918 4631 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3e44051-4743-4e6c-b60a-ab474e4cf74e-catalog-content\") pod \"redhat-operators-qq27m\" (UID: \"a3e44051-4743-4e6c-b60a-ab474e4cf74e\") " pod="openshift-marketplace/redhat-operators-qq27m" Dec 04 18:34:29 crc kubenswrapper[4631]: I1204 18:34:29.658643 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3e44051-4743-4e6c-b60a-ab474e4cf74e-utilities\") pod \"redhat-operators-qq27m\" (UID: \"a3e44051-4743-4e6c-b60a-ab474e4cf74e\") " pod="openshift-marketplace/redhat-operators-qq27m" Dec 04 18:34:29 crc kubenswrapper[4631]: I1204 18:34:29.658682 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3e44051-4743-4e6c-b60a-ab474e4cf74e-utilities\") pod \"redhat-operators-qq27m\" (UID: \"a3e44051-4743-4e6c-b60a-ab474e4cf74e\") " pod="openshift-marketplace/redhat-operators-qq27m" Dec 04 18:34:29 crc kubenswrapper[4631]: I1204 18:34:29.703880 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l69p4\" (UniqueName: \"kubernetes.io/projected/a3e44051-4743-4e6c-b60a-ab474e4cf74e-kube-api-access-l69p4\") pod \"redhat-operators-qq27m\" (UID: \"a3e44051-4743-4e6c-b60a-ab474e4cf74e\") " pod="openshift-marketplace/redhat-operators-qq27m" Dec 04 18:34:29 crc kubenswrapper[4631]: I1204 18:34:29.772384 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qq27m" Dec 04 18:34:30 crc kubenswrapper[4631]: I1204 18:34:30.256179 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qq27m"] Dec 04 18:34:30 crc kubenswrapper[4631]: I1204 18:34:30.387418 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qq27m" event={"ID":"a3e44051-4743-4e6c-b60a-ab474e4cf74e","Type":"ContainerStarted","Data":"f2cc5a0ac1a9e985d08c975f20be60d2caf4eb93adc14b9e05b4626980618dd7"} Dec 04 18:34:31 crc kubenswrapper[4631]: I1204 18:34:31.403217 4631 generic.go:334] "Generic (PLEG): container finished" podID="a3e44051-4743-4e6c-b60a-ab474e4cf74e" containerID="74e1ce6ff2f110c599662e23147e7623073c1f0c83652937e38f927aa49e0fb5" exitCode=0 Dec 04 18:34:31 crc kubenswrapper[4631]: I1204 18:34:31.403416 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qq27m" event={"ID":"a3e44051-4743-4e6c-b60a-ab474e4cf74e","Type":"ContainerDied","Data":"74e1ce6ff2f110c599662e23147e7623073c1f0c83652937e38f927aa49e0fb5"} Dec 04 18:34:32 crc kubenswrapper[4631]: I1204 18:34:32.427147 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qq27m" event={"ID":"a3e44051-4743-4e6c-b60a-ab474e4cf74e","Type":"ContainerStarted","Data":"1efad9f45ea781f71243ca097695fb817128c99f8f9ec31df8bcee59a97799d9"} Dec 04 18:34:35 crc kubenswrapper[4631]: I1204 18:34:35.460640 4631 generic.go:334] "Generic (PLEG): container finished" podID="a3e44051-4743-4e6c-b60a-ab474e4cf74e" containerID="1efad9f45ea781f71243ca097695fb817128c99f8f9ec31df8bcee59a97799d9" exitCode=0 Dec 04 18:34:35 crc kubenswrapper[4631]: I1204 18:34:35.460809 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qq27m" 
event={"ID":"a3e44051-4743-4e6c-b60a-ab474e4cf74e","Type":"ContainerDied","Data":"1efad9f45ea781f71243ca097695fb817128c99f8f9ec31df8bcee59a97799d9"} Dec 04 18:34:36 crc kubenswrapper[4631]: I1204 18:34:36.472454 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qq27m" event={"ID":"a3e44051-4743-4e6c-b60a-ab474e4cf74e","Type":"ContainerStarted","Data":"a163d332aa86c614a720c65bc67d63044af374dbf57db0d3a464bcdfd95571b6"} Dec 04 18:34:36 crc kubenswrapper[4631]: I1204 18:34:36.510118 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qq27m" podStartSLOduration=3.017548081 podStartE2EDuration="7.5100964s" podCreationTimestamp="2025-12-04 18:34:29 +0000 UTC" firstStartedPulling="2025-12-04 18:34:31.408212618 +0000 UTC m=+4001.440454616" lastFinishedPulling="2025-12-04 18:34:35.900760927 +0000 UTC m=+4005.933002935" observedRunningTime="2025-12-04 18:34:36.495676281 +0000 UTC m=+4006.527918279" watchObservedRunningTime="2025-12-04 18:34:36.5100964 +0000 UTC m=+4006.542338418" Dec 04 18:34:39 crc kubenswrapper[4631]: I1204 18:34:39.773055 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qq27m" Dec 04 18:34:39 crc kubenswrapper[4631]: I1204 18:34:39.773588 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qq27m" Dec 04 18:34:40 crc kubenswrapper[4631]: I1204 18:34:40.828269 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-qq27m" podUID="a3e44051-4743-4e6c-b60a-ab474e4cf74e" containerName="registry-server" probeResult="failure" output=< Dec 04 18:34:40 crc kubenswrapper[4631]: timeout: failed to connect service ":50051" within 1s Dec 04 18:34:40 crc kubenswrapper[4631]: > Dec 04 18:34:49 crc kubenswrapper[4631]: I1204 18:34:49.824182 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qq27m" Dec 04 18:34:49 crc kubenswrapper[4631]: I1204 18:34:49.897507 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qq27m" Dec 04 18:34:50 crc kubenswrapper[4631]: I1204 18:34:50.061717 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qq27m"] Dec 04 18:34:51 crc kubenswrapper[4631]: I1204 18:34:51.593579 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qq27m" podUID="a3e44051-4743-4e6c-b60a-ab474e4cf74e" containerName="registry-server" containerID="cri-o://a163d332aa86c614a720c65bc67d63044af374dbf57db0d3a464bcdfd95571b6" gracePeriod=2 Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.360564 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qq27m" Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.484437 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3e44051-4743-4e6c-b60a-ab474e4cf74e-utilities\") pod \"a3e44051-4743-4e6c-b60a-ab474e4cf74e\" (UID: \"a3e44051-4743-4e6c-b60a-ab474e4cf74e\") " Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.484561 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3e44051-4743-4e6c-b60a-ab474e4cf74e-catalog-content\") pod \"a3e44051-4743-4e6c-b60a-ab474e4cf74e\" (UID: \"a3e44051-4743-4e6c-b60a-ab474e4cf74e\") " Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.484636 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l69p4\" (UniqueName: \"kubernetes.io/projected/a3e44051-4743-4e6c-b60a-ab474e4cf74e-kube-api-access-l69p4\") pod \"a3e44051-4743-4e6c-b60a-ab474e4cf74e\" (UID: \"a3e44051-4743-4e6c-b60a-ab474e4cf74e\") " Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.485745 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3e44051-4743-4e6c-b60a-ab474e4cf74e-utilities" (OuterVolumeSpecName: "utilities") pod "a3e44051-4743-4e6c-b60a-ab474e4cf74e" (UID: "a3e44051-4743-4e6c-b60a-ab474e4cf74e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.513021 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3e44051-4743-4e6c-b60a-ab474e4cf74e-kube-api-access-l69p4" (OuterVolumeSpecName: "kube-api-access-l69p4") pod "a3e44051-4743-4e6c-b60a-ab474e4cf74e" (UID: "a3e44051-4743-4e6c-b60a-ab474e4cf74e"). InnerVolumeSpecName "kube-api-access-l69p4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.586642 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3e44051-4743-4e6c-b60a-ab474e4cf74e-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.586958 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l69p4\" (UniqueName: \"kubernetes.io/projected/a3e44051-4743-4e6c-b60a-ab474e4cf74e-kube-api-access-l69p4\") on node \"crc\" DevicePath \"\"" Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.602590 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3e44051-4743-4e6c-b60a-ab474e4cf74e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a3e44051-4743-4e6c-b60a-ab474e4cf74e" (UID: "a3e44051-4743-4e6c-b60a-ab474e4cf74e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.605473 4631 generic.go:334] "Generic (PLEG): container finished" podID="a3e44051-4743-4e6c-b60a-ab474e4cf74e" containerID="a163d332aa86c614a720c65bc67d63044af374dbf57db0d3a464bcdfd95571b6" exitCode=0 Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.605518 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qq27m" event={"ID":"a3e44051-4743-4e6c-b60a-ab474e4cf74e","Type":"ContainerDied","Data":"a163d332aa86c614a720c65bc67d63044af374dbf57db0d3a464bcdfd95571b6"} Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.605547 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qq27m" event={"ID":"a3e44051-4743-4e6c-b60a-ab474e4cf74e","Type":"ContainerDied","Data":"f2cc5a0ac1a9e985d08c975f20be60d2caf4eb93adc14b9e05b4626980618dd7"} Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.605572 4631 scope.go:117] "RemoveContainer" containerID="a163d332aa86c614a720c65bc67d63044af374dbf57db0d3a464bcdfd95571b6" Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.605760 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qq27m" Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.645034 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qq27m"] Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.646358 4631 scope.go:117] "RemoveContainer" containerID="1efad9f45ea781f71243ca097695fb817128c99f8f9ec31df8bcee59a97799d9" Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.655977 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qq27m"] Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.669778 4631 scope.go:117] "RemoveContainer" containerID="74e1ce6ff2f110c599662e23147e7623073c1f0c83652937e38f927aa49e0fb5" Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.689012 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3e44051-4743-4e6c-b60a-ab474e4cf74e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.716643 4631 scope.go:117] "RemoveContainer" containerID="a163d332aa86c614a720c65bc67d63044af374dbf57db0d3a464bcdfd95571b6" Dec 04 18:34:52 crc kubenswrapper[4631]: E1204 18:34:52.717333 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a163d332aa86c614a720c65bc67d63044af374dbf57db0d3a464bcdfd95571b6\": container with ID starting with a163d332aa86c614a720c65bc67d63044af374dbf57db0d3a464bcdfd95571b6 not found: ID does not exist" containerID="a163d332aa86c614a720c65bc67d63044af374dbf57db0d3a464bcdfd95571b6" Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.717453 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a163d332aa86c614a720c65bc67d63044af374dbf57db0d3a464bcdfd95571b6"} err="failed to get container status \"a163d332aa86c614a720c65bc67d63044af374dbf57db0d3a464bcdfd95571b6\": rpc error: code = NotFound desc = could not find container \"a163d332aa86c614a720c65bc67d63044af374dbf57db0d3a464bcdfd95571b6\": container with ID starting with a163d332aa86c614a720c65bc67d63044af374dbf57db0d3a464bcdfd95571b6 not found: ID does not exist" Dec 04 18:34:52 crc 
kubenswrapper[4631]: I1204 18:34:52.717482 4631 scope.go:117] "RemoveContainer" containerID="1efad9f45ea781f71243ca097695fb817128c99f8f9ec31df8bcee59a97799d9" Dec 04 18:34:52 crc kubenswrapper[4631]: E1204 18:34:52.718363 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1efad9f45ea781f71243ca097695fb817128c99f8f9ec31df8bcee59a97799d9\": container with ID starting with 1efad9f45ea781f71243ca097695fb817128c99f8f9ec31df8bcee59a97799d9 not found: ID does not exist" containerID="1efad9f45ea781f71243ca097695fb817128c99f8f9ec31df8bcee59a97799d9" Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.718699 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1efad9f45ea781f71243ca097695fb817128c99f8f9ec31df8bcee59a97799d9"} err="failed to get container status \"1efad9f45ea781f71243ca097695fb817128c99f8f9ec31df8bcee59a97799d9\": rpc error: code = NotFound desc = could not find container \"1efad9f45ea781f71243ca097695fb817128c99f8f9ec31df8bcee59a97799d9\": container with ID starting with 1efad9f45ea781f71243ca097695fb817128c99f8f9ec31df8bcee59a97799d9 not found: ID does not exist" Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.718726 4631 scope.go:117] "RemoveContainer" containerID="74e1ce6ff2f110c599662e23147e7623073c1f0c83652937e38f927aa49e0fb5" Dec 04 18:34:52 crc kubenswrapper[4631]: E1204 18:34:52.719117 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74e1ce6ff2f110c599662e23147e7623073c1f0c83652937e38f927aa49e0fb5\": container with ID starting with 74e1ce6ff2f110c599662e23147e7623073c1f0c83652937e38f927aa49e0fb5 not found: ID does not exist" containerID="74e1ce6ff2f110c599662e23147e7623073c1f0c83652937e38f927aa49e0fb5" Dec 04 18:34:52 crc kubenswrapper[4631]: I1204 18:34:52.719165 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74e1ce6ff2f110c599662e23147e7623073c1f0c83652937e38f927aa49e0fb5"} err="failed to get container status \"74e1ce6ff2f110c599662e23147e7623073c1f0c83652937e38f927aa49e0fb5\": rpc error: code = NotFound desc = could not find container \"74e1ce6ff2f110c599662e23147e7623073c1f0c83652937e38f927aa49e0fb5\": container with ID starting with 74e1ce6ff2f110c599662e23147e7623073c1f0c83652937e38f927aa49e0fb5 not found: ID does not exist" Dec 04 18:34:54 crc kubenswrapper[4631]: I1204 18:34:54.258886 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3e44051-4743-4e6c-b60a-ab474e4cf74e" path="/var/lib/kubelet/pods/a3e44051-4743-4e6c-b60a-ab474e4cf74e/volumes" Dec 04 18:36:06 crc kubenswrapper[4631]: I1204 18:36:06.023010 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:36:06 crc kubenswrapper[4631]: I1204 18:36:06.023728 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 18:36:36 crc kubenswrapper[4631]: I1204 18:36:36.022600 4631 patch_prober.go:28] interesting 
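
Note: the utilities and catalog-content volumes being torn down above are plain pod-scoped emptyDirs, which is why detach is immediate and reports an empty DevicePath, and why the pod's volumes dir can simply be deleted afterwards ("Cleaned up orphaned pod volumes dir"). A minimal sketch of the spec behind this mount/unmount lifecycle, assuming k8s.io/api types (the pod name is from the log; the rest is illustrative, not the actual marketplace manifest):

    package main

    import (
    	"fmt"

    	corev1 "k8s.io/api/core/v1"
    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    )

    func main() {
    	pod := corev1.Pod{
    		ObjectMeta: metav1.ObjectMeta{
    			Name:      "redhat-operators-qq27m",
    			Namespace: "openshift-marketplace",
    		},
    		Spec: corev1.PodSpec{
    			Volumes: []corev1.Volume{
    				// Node-local scratch space, created with the pod and
    				// removed with it; no controller attach/detach involved.
    				{Name: "utilities", VolumeSource: corev1.VolumeSource{
    					EmptyDir: &corev1.EmptyDirVolumeSource{}}},
    				{Name: "catalog-content", VolumeSource: corev1.VolumeSource{
    					EmptyDir: &corev1.EmptyDirVolumeSource{}}},
    			},
    		},
    	}
    	fmt.Println(pod.Name, len(pod.Spec.Volumes))
    }
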
Dec 04 18:36:36 crc kubenswrapper[4631]: I1204 18:36:36.023180 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 18:37:06 crc kubenswrapper[4631]: I1204 18:37:06.022780 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 18:37:06 crc kubenswrapper[4631]: I1204 18:37:06.023320 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 18:37:06 crc kubenswrapper[4631]: I1204 18:37:06.023364 4631 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q27wh"
Dec 04 18:37:06 crc kubenswrapper[4631]: I1204 18:37:06.024043 4631 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a08abbadd1666d41c778d93c9168f1cedea0ac0fd4eecfadace71ecc85537949"} pod="openshift-machine-config-operator/machine-config-daemon-q27wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 04 18:37:06 crc kubenswrapper[4631]: I1204 18:37:06.024103 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" containerID="cri-o://a08abbadd1666d41c778d93c9168f1cedea0ac0fd4eecfadace71ecc85537949" gracePeriod=600
Dec 04 18:37:07 crc kubenswrapper[4631]: I1204 18:37:07.108601 4631 generic.go:334] "Generic (PLEG): container finished" podID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerID="a08abbadd1666d41c778d93c9168f1cedea0ac0fd4eecfadace71ecc85537949" exitCode=0
Dec 04 18:37:07 crc kubenswrapper[4631]: I1204 18:37:07.109137 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerDied","Data":"a08abbadd1666d41c778d93c9168f1cedea0ac0fd4eecfadace71ecc85537949"}
Dec 04 18:37:07 crc kubenswrapper[4631]: I1204 18:37:07.109164 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerStarted","Data":"f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef"}
Dec 04 18:37:07 crc kubenswrapper[4631]: I1204 18:37:07.109178 4631 scope.go:117] "RemoveContainer" containerID="ce8b8caf517c1c2c3153fd2490716e0723910180e3f4bfcd82bdf15088f19dfa"
Dec 04 18:38:59 crc kubenswrapper[4631]: I1204 18:38:59.339666 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5mnmc"]
Dec 04 18:38:59 crc kubenswrapper[4631]: E1204 18:38:59.340603 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3e44051-4743-4e6c-b60a-ab474e4cf74e" containerName="extract-utilities"
Dec 04 18:38:59 crc kubenswrapper[4631]: I1204 18:38:59.340617 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3e44051-4743-4e6c-b60a-ab474e4cf74e" containerName="extract-utilities"
Dec 04 18:38:59 crc kubenswrapper[4631]: E1204 18:38:59.340639 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3e44051-4743-4e6c-b60a-ab474e4cf74e" containerName="extract-content"
Dec 04 18:38:59 crc kubenswrapper[4631]: I1204 18:38:59.340645 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3e44051-4743-4e6c-b60a-ab474e4cf74e" containerName="extract-content"
Dec 04 18:38:59 crc kubenswrapper[4631]: E1204 18:38:59.340659 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3e44051-4743-4e6c-b60a-ab474e4cf74e" containerName="registry-server"
Dec 04 18:38:59 crc kubenswrapper[4631]: I1204 18:38:59.340666 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3e44051-4743-4e6c-b60a-ab474e4cf74e" containerName="registry-server"
Dec 04 18:38:59 crc kubenswrapper[4631]: I1204 18:38:59.340843 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3e44051-4743-4e6c-b60a-ab474e4cf74e" containerName="registry-server"
Dec 04 18:38:59 crc kubenswrapper[4631]: I1204 18:38:59.344789 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5mnmc"
Dec 04 18:38:59 crc kubenswrapper[4631]: I1204 18:38:59.374815 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5mnmc"]
Dec 04 18:38:59 crc kubenswrapper[4631]: I1204 18:38:59.483958 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a87993dc-da81-4878-9276-763e630de846-utilities\") pod \"redhat-marketplace-5mnmc\" (UID: \"a87993dc-da81-4878-9276-763e630de846\") " pod="openshift-marketplace/redhat-marketplace-5mnmc"
Dec 04 18:38:59 crc kubenswrapper[4631]: I1204 18:38:59.484031 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqwgw\" (UniqueName: \"kubernetes.io/projected/a87993dc-da81-4878-9276-763e630de846-kube-api-access-mqwgw\") pod \"redhat-marketplace-5mnmc\" (UID: \"a87993dc-da81-4878-9276-763e630de846\") " pod="openshift-marketplace/redhat-marketplace-5mnmc"
Dec 04 18:38:59 crc kubenswrapper[4631]: I1204 18:38:59.484164 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a87993dc-da81-4878-9276-763e630de846-catalog-content\") pod \"redhat-marketplace-5mnmc\" (UID: \"a87993dc-da81-4878-9276-763e630de846\") " pod="openshift-marketplace/redhat-marketplace-5mnmc"
Dec 04 18:38:59 crc kubenswrapper[4631]: I1204 18:38:59.585587 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a87993dc-da81-4878-9276-763e630de846-utilities\") pod \"redhat-marketplace-5mnmc\" (UID: \"a87993dc-da81-4878-9276-763e630de846\") " pod="openshift-marketplace/redhat-marketplace-5mnmc"
Dec 04 18:38:59 crc kubenswrapper[4631]: I1204 18:38:59.585655 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqwgw\" (UniqueName: \"kubernetes.io/projected/a87993dc-da81-4878-9276-763e630de846-kube-api-access-mqwgw\") pod \"redhat-marketplace-5mnmc\" (UID: \"a87993dc-da81-4878-9276-763e630de846\") " pod="openshift-marketplace/redhat-marketplace-5mnmc"
Dec 04 18:38:59 crc kubenswrapper[4631]: I1204 18:38:59.585680 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a87993dc-da81-4878-9276-763e630de846-catalog-content\") pod \"redhat-marketplace-5mnmc\" (UID: \"a87993dc-da81-4878-9276-763e630de846\") " pod="openshift-marketplace/redhat-marketplace-5mnmc"
Dec 04 18:38:59 crc kubenswrapper[4631]: I1204 18:38:59.586134 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a87993dc-da81-4878-9276-763e630de846-utilities\") pod \"redhat-marketplace-5mnmc\" (UID: \"a87993dc-da81-4878-9276-763e630de846\") " pod="openshift-marketplace/redhat-marketplace-5mnmc"
Dec 04 18:38:59 crc kubenswrapper[4631]: I1204 18:38:59.586336 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a87993dc-da81-4878-9276-763e630de846-catalog-content\") pod \"redhat-marketplace-5mnmc\" (UID: \"a87993dc-da81-4878-9276-763e630de846\") " pod="openshift-marketplace/redhat-marketplace-5mnmc"
Dec 04 18:38:59 crc kubenswrapper[4631]: I1204 18:38:59.608878 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqwgw\" (UniqueName: \"kubernetes.io/projected/a87993dc-da81-4878-9276-763e630de846-kube-api-access-mqwgw\") pod \"redhat-marketplace-5mnmc\" (UID: \"a87993dc-da81-4878-9276-763e630de846\") " pod="openshift-marketplace/redhat-marketplace-5mnmc"
Dec 04 18:38:59 crc kubenswrapper[4631]: I1204 18:38:59.711883 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5mnmc"
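
Note: the machine-config-daemon liveness failures above are plain HTTP GET probes against http://127.0.0.1:8798/health; "connection refused" means nothing was listening. The failures logged at 18:36:06, 18:36:36 and 18:37:06, 30s apart and followed by the kill (gracePeriod=600) and restart, are consistent with a probe period of 30s and a failure threshold of 3. A minimal sketch of the same check, assuming the kubelet's default 1s probe timeout (not read from the pod spec):

    package main

    import (
    	"fmt"
    	"net/http"
    	"time"
    )

    func main() {
    	// Same endpoint the kubelet probes in the log.
    	client := http.Client{Timeout: time.Second}
    	resp, err := client.Get("http://127.0.0.1:8798/health")
    	if err != nil {
    		fmt.Println("probe failure:", err) // e.g. connect: connection refused
    		return
    	}
    	defer resp.Body.Close()
    	fmt.Println("probe status:", resp.Status) // any 2xx/3xx counts as success
    }
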
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5mnmc" Dec 04 18:39:00 crc kubenswrapper[4631]: I1204 18:39:00.230129 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5mnmc"] Dec 04 18:39:01 crc kubenswrapper[4631]: I1204 18:39:01.167691 4631 generic.go:334] "Generic (PLEG): container finished" podID="a87993dc-da81-4878-9276-763e630de846" containerID="85e277c99add458d0b16498656fd7a9119aced885049615220a900b9f60cde0f" exitCode=0 Dec 04 18:39:01 crc kubenswrapper[4631]: I1204 18:39:01.167845 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5mnmc" event={"ID":"a87993dc-da81-4878-9276-763e630de846","Type":"ContainerDied","Data":"85e277c99add458d0b16498656fd7a9119aced885049615220a900b9f60cde0f"} Dec 04 18:39:01 crc kubenswrapper[4631]: I1204 18:39:01.168001 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5mnmc" event={"ID":"a87993dc-da81-4878-9276-763e630de846","Type":"ContainerStarted","Data":"2b838e5dd852b6e152a188ac54510de849b8acc28ba65c0f3c10c99ad0706f13"} Dec 04 18:39:02 crc kubenswrapper[4631]: I1204 18:39:02.179276 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5mnmc" event={"ID":"a87993dc-da81-4878-9276-763e630de846","Type":"ContainerStarted","Data":"2dd33ae831e2ebbf191ae7de867cd87c0f16978810395e6eac0e32514b44b71b"} Dec 04 18:39:02 crc kubenswrapper[4631]: I1204 18:39:02.334971 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9xr4h"] Dec 04 18:39:02 crc kubenswrapper[4631]: I1204 18:39:02.336793 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9xr4h" Dec 04 18:39:02 crc kubenswrapper[4631]: I1204 18:39:02.352457 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9xr4h"] Dec 04 18:39:02 crc kubenswrapper[4631]: I1204 18:39:02.469644 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aaad54de-a5f3-452d-b552-56376be0739a-catalog-content\") pod \"community-operators-9xr4h\" (UID: \"aaad54de-a5f3-452d-b552-56376be0739a\") " pod="openshift-marketplace/community-operators-9xr4h" Dec 04 18:39:02 crc kubenswrapper[4631]: I1204 18:39:02.469965 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6mqr\" (UniqueName: \"kubernetes.io/projected/aaad54de-a5f3-452d-b552-56376be0739a-kube-api-access-j6mqr\") pod \"community-operators-9xr4h\" (UID: \"aaad54de-a5f3-452d-b552-56376be0739a\") " pod="openshift-marketplace/community-operators-9xr4h" Dec 04 18:39:02 crc kubenswrapper[4631]: I1204 18:39:02.470069 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aaad54de-a5f3-452d-b552-56376be0739a-utilities\") pod \"community-operators-9xr4h\" (UID: \"aaad54de-a5f3-452d-b552-56376be0739a\") " pod="openshift-marketplace/community-operators-9xr4h" Dec 04 18:39:02 crc kubenswrapper[4631]: I1204 18:39:02.572251 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aaad54de-a5f3-452d-b552-56376be0739a-catalog-content\") pod \"community-operators-9xr4h\" (UID: 
\"aaad54de-a5f3-452d-b552-56376be0739a\") " pod="openshift-marketplace/community-operators-9xr4h" Dec 04 18:39:02 crc kubenswrapper[4631]: I1204 18:39:02.572340 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6mqr\" (UniqueName: \"kubernetes.io/projected/aaad54de-a5f3-452d-b552-56376be0739a-kube-api-access-j6mqr\") pod \"community-operators-9xr4h\" (UID: \"aaad54de-a5f3-452d-b552-56376be0739a\") " pod="openshift-marketplace/community-operators-9xr4h" Dec 04 18:39:02 crc kubenswrapper[4631]: I1204 18:39:02.572359 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aaad54de-a5f3-452d-b552-56376be0739a-utilities\") pod \"community-operators-9xr4h\" (UID: \"aaad54de-a5f3-452d-b552-56376be0739a\") " pod="openshift-marketplace/community-operators-9xr4h" Dec 04 18:39:02 crc kubenswrapper[4631]: I1204 18:39:02.572797 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aaad54de-a5f3-452d-b552-56376be0739a-utilities\") pod \"community-operators-9xr4h\" (UID: \"aaad54de-a5f3-452d-b552-56376be0739a\") " pod="openshift-marketplace/community-operators-9xr4h" Dec 04 18:39:02 crc kubenswrapper[4631]: I1204 18:39:02.572998 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aaad54de-a5f3-452d-b552-56376be0739a-catalog-content\") pod \"community-operators-9xr4h\" (UID: \"aaad54de-a5f3-452d-b552-56376be0739a\") " pod="openshift-marketplace/community-operators-9xr4h" Dec 04 18:39:02 crc kubenswrapper[4631]: I1204 18:39:02.606816 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6mqr\" (UniqueName: \"kubernetes.io/projected/aaad54de-a5f3-452d-b552-56376be0739a-kube-api-access-j6mqr\") pod \"community-operators-9xr4h\" (UID: \"aaad54de-a5f3-452d-b552-56376be0739a\") " pod="openshift-marketplace/community-operators-9xr4h" Dec 04 18:39:02 crc kubenswrapper[4631]: I1204 18:39:02.653012 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9xr4h" Dec 04 18:39:03 crc kubenswrapper[4631]: I1204 18:39:03.162769 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9xr4h"] Dec 04 18:39:03 crc kubenswrapper[4631]: W1204 18:39:03.166953 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaaad54de_a5f3_452d_b552_56376be0739a.slice/crio-ead62e58a7132f979df2208c1c48a1cc314363af97dee34238a47f1c5795318f WatchSource:0}: Error finding container ead62e58a7132f979df2208c1c48a1cc314363af97dee34238a47f1c5795318f: Status 404 returned error can't find the container with id ead62e58a7132f979df2208c1c48a1cc314363af97dee34238a47f1c5795318f Dec 04 18:39:03 crc kubenswrapper[4631]: I1204 18:39:03.226694 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xr4h" event={"ID":"aaad54de-a5f3-452d-b552-56376be0739a","Type":"ContainerStarted","Data":"ead62e58a7132f979df2208c1c48a1cc314363af97dee34238a47f1c5795318f"} Dec 04 18:39:03 crc kubenswrapper[4631]: I1204 18:39:03.266626 4631 generic.go:334] "Generic (PLEG): container finished" podID="a87993dc-da81-4878-9276-763e630de846" containerID="2dd33ae831e2ebbf191ae7de867cd87c0f16978810395e6eac0e32514b44b71b" exitCode=0 Dec 04 18:39:03 crc kubenswrapper[4631]: I1204 18:39:03.266665 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5mnmc" event={"ID":"a87993dc-da81-4878-9276-763e630de846","Type":"ContainerDied","Data":"2dd33ae831e2ebbf191ae7de867cd87c0f16978810395e6eac0e32514b44b71b"} Dec 04 18:39:04 crc kubenswrapper[4631]: I1204 18:39:04.277596 4631 generic.go:334] "Generic (PLEG): container finished" podID="aaad54de-a5f3-452d-b552-56376be0739a" containerID="1a7baeae3fed22d9d151417fece3a6b9d71476a5c381fd2178a94f662873d319" exitCode=0 Dec 04 18:39:04 crc kubenswrapper[4631]: I1204 18:39:04.277708 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xr4h" event={"ID":"aaad54de-a5f3-452d-b552-56376be0739a","Type":"ContainerDied","Data":"1a7baeae3fed22d9d151417fece3a6b9d71476a5c381fd2178a94f662873d319"} Dec 04 18:39:04 crc kubenswrapper[4631]: I1204 18:39:04.282781 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5mnmc" event={"ID":"a87993dc-da81-4878-9276-763e630de846","Type":"ContainerStarted","Data":"c39adf52a055ed635e55ebafab621766519ae20cc0848004adf63d2764ed9254"} Dec 04 18:39:04 crc kubenswrapper[4631]: I1204 18:39:04.324962 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5mnmc" podStartSLOduration=2.785156882 podStartE2EDuration="5.324943399s" podCreationTimestamp="2025-12-04 18:38:59 +0000 UTC" firstStartedPulling="2025-12-04 18:39:01.170324824 +0000 UTC m=+4271.202566832" lastFinishedPulling="2025-12-04 18:39:03.710111361 +0000 UTC m=+4273.742353349" observedRunningTime="2025-12-04 18:39:04.320675678 +0000 UTC m=+4274.352917676" watchObservedRunningTime="2025-12-04 18:39:04.324943399 +0000 UTC m=+4274.357185397" Dec 04 18:39:05 crc kubenswrapper[4631]: I1204 18:39:05.293468 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xr4h" event={"ID":"aaad54de-a5f3-452d-b552-56376be0739a","Type":"ContainerStarted","Data":"c50bb4790bde5e4bb7f682bed99276e8a9af8c2bf5094d1121f33fc919444c52"} Dec 
Dec 04 18:39:06 crc kubenswrapper[4631]: I1204 18:39:06.023400 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 18:39:06 crc kubenswrapper[4631]: I1204 18:39:06.023461 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 18:39:06 crc kubenswrapper[4631]: I1204 18:39:06.301480 4631 generic.go:334] "Generic (PLEG): container finished" podID="aaad54de-a5f3-452d-b552-56376be0739a" containerID="c50bb4790bde5e4bb7f682bed99276e8a9af8c2bf5094d1121f33fc919444c52" exitCode=0
Dec 04 18:39:06 crc kubenswrapper[4631]: I1204 18:39:06.301519 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xr4h" event={"ID":"aaad54de-a5f3-452d-b552-56376be0739a","Type":"ContainerDied","Data":"c50bb4790bde5e4bb7f682bed99276e8a9af8c2bf5094d1121f33fc919444c52"}
Dec 04 18:39:07 crc kubenswrapper[4631]: I1204 18:39:07.312040 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xr4h" event={"ID":"aaad54de-a5f3-452d-b552-56376be0739a","Type":"ContainerStarted","Data":"047716b357ef2073183c2b173b4627e76c07d0ae0c2c8c045306360b7a6d36dd"}
Dec 04 18:39:07 crc kubenswrapper[4631]: I1204 18:39:07.337998 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9xr4h" podStartSLOduration=2.831649825 podStartE2EDuration="5.337977711s" podCreationTimestamp="2025-12-04 18:39:02 +0000 UTC" firstStartedPulling="2025-12-04 18:39:04.280009993 +0000 UTC m=+4274.312251991" lastFinishedPulling="2025-12-04 18:39:06.786337879 +0000 UTC m=+4276.818579877" observedRunningTime="2025-12-04 18:39:07.332727372 +0000 UTC m=+4277.364969380" watchObservedRunningTime="2025-12-04 18:39:07.337977711 +0000 UTC m=+4277.370219709"
Dec 04 18:39:09 crc kubenswrapper[4631]: I1204 18:39:09.712616 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5mnmc"
Dec 04 18:39:09 crc kubenswrapper[4631]: I1204 18:39:09.713036 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5mnmc"
Dec 04 18:39:10 crc kubenswrapper[4631]: I1204 18:39:10.051874 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5mnmc"
Dec 04 18:39:10 crc kubenswrapper[4631]: I1204 18:39:10.388131 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5mnmc"
Dec 04 18:39:12 crc kubenswrapper[4631]: I1204 18:39:12.653940 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9xr4h"
Dec 04 18:39:12 crc kubenswrapper[4631]: I1204 18:39:12.654414 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9xr4h"
Dec 04 18:39:12 crc kubenswrapper[4631]: I1204 18:39:12.705092 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9xr4h"
Dec 04 18:39:13 crc kubenswrapper[4631]: I1204 18:39:13.405482 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9xr4h"
Dec 04 18:39:14 crc kubenswrapper[4631]: I1204 18:39:14.131440 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9xr4h"]
Dec 04 18:39:14 crc kubenswrapper[4631]: I1204 18:39:14.538740 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5mnmc"]
Dec 04 18:39:14 crc kubenswrapper[4631]: I1204 18:39:14.539248 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5mnmc" podUID="a87993dc-da81-4878-9276-763e630de846" containerName="registry-server" containerID="cri-o://c39adf52a055ed635e55ebafab621766519ae20cc0848004adf63d2764ed9254" gracePeriod=2
Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.110086 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5mnmc"
Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.239661 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a87993dc-da81-4878-9276-763e630de846-catalog-content\") pod \"a87993dc-da81-4878-9276-763e630de846\" (UID: \"a87993dc-da81-4878-9276-763e630de846\") "
Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.240299 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a87993dc-da81-4878-9276-763e630de846-utilities\") pod \"a87993dc-da81-4878-9276-763e630de846\" (UID: \"a87993dc-da81-4878-9276-763e630de846\") "
Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.240420 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqwgw\" (UniqueName: \"kubernetes.io/projected/a87993dc-da81-4878-9276-763e630de846-kube-api-access-mqwgw\") pod \"a87993dc-da81-4878-9276-763e630de846\" (UID: \"a87993dc-da81-4878-9276-763e630de846\") "
Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.241486 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a87993dc-da81-4878-9276-763e630de846-utilities" (OuterVolumeSpecName: "utilities") pod "a87993dc-da81-4878-9276-763e630de846" (UID: "a87993dc-da81-4878-9276-763e630de846"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.250535 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a87993dc-da81-4878-9276-763e630de846-kube-api-access-mqwgw" (OuterVolumeSpecName: "kube-api-access-mqwgw") pod "a87993dc-da81-4878-9276-763e630de846" (UID: "a87993dc-da81-4878-9276-763e630de846"). InnerVolumeSpecName "kube-api-access-mqwgw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.265148 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a87993dc-da81-4878-9276-763e630de846-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a87993dc-da81-4878-9276-763e630de846" (UID: "a87993dc-da81-4878-9276-763e630de846"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.343442 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a87993dc-da81-4878-9276-763e630de846-utilities\") on node \"crc\" DevicePath \"\""
Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.343473 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqwgw\" (UniqueName: \"kubernetes.io/projected/a87993dc-da81-4878-9276-763e630de846-kube-api-access-mqwgw\") on node \"crc\" DevicePath \"\""
Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.343487 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a87993dc-da81-4878-9276-763e630de846-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.382224 4631 generic.go:334] "Generic (PLEG): container finished" podID="a87993dc-da81-4878-9276-763e630de846" containerID="c39adf52a055ed635e55ebafab621766519ae20cc0848004adf63d2764ed9254" exitCode=0
Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.382424 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9xr4h" podUID="aaad54de-a5f3-452d-b552-56376be0739a" containerName="registry-server" containerID="cri-o://047716b357ef2073183c2b173b4627e76c07d0ae0c2c8c045306360b7a6d36dd" gracePeriod=2
Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.382509 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5mnmc"
Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.383494 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5mnmc" event={"ID":"a87993dc-da81-4878-9276-763e630de846","Type":"ContainerDied","Data":"c39adf52a055ed635e55ebafab621766519ae20cc0848004adf63d2764ed9254"}
Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.383884 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5mnmc" event={"ID":"a87993dc-da81-4878-9276-763e630de846","Type":"ContainerDied","Data":"2b838e5dd852b6e152a188ac54510de849b8acc28ba65c0f3c10c99ad0706f13"}
Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.383987 4631 scope.go:117] "RemoveContainer" containerID="c39adf52a055ed635e55ebafab621766519ae20cc0848004adf63d2764ed9254"
Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.426488 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5mnmc"]
Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.430888 4631 scope.go:117] "RemoveContainer" containerID="2dd33ae831e2ebbf191ae7de867cd87c0f16978810395e6eac0e32514b44b71b"
Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.437851 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5mnmc"]
Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.461608 4631 scope.go:117] "RemoveContainer" containerID="85e277c99add458d0b16498656fd7a9119aced885049615220a900b9f60cde0f"
Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.494749 4631 scope.go:117] "RemoveContainer" containerID="c39adf52a055ed635e55ebafab621766519ae20cc0848004adf63d2764ed9254"
Dec 04 18:39:15 crc kubenswrapper[4631]: E1204 18:39:15.495782 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
could not find container \"c39adf52a055ed635e55ebafab621766519ae20cc0848004adf63d2764ed9254\": container with ID starting with c39adf52a055ed635e55ebafab621766519ae20cc0848004adf63d2764ed9254 not found: ID does not exist" containerID="c39adf52a055ed635e55ebafab621766519ae20cc0848004adf63d2764ed9254" Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.495820 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c39adf52a055ed635e55ebafab621766519ae20cc0848004adf63d2764ed9254"} err="failed to get container status \"c39adf52a055ed635e55ebafab621766519ae20cc0848004adf63d2764ed9254\": rpc error: code = NotFound desc = could not find container \"c39adf52a055ed635e55ebafab621766519ae20cc0848004adf63d2764ed9254\": container with ID starting with c39adf52a055ed635e55ebafab621766519ae20cc0848004adf63d2764ed9254 not found: ID does not exist" Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.495844 4631 scope.go:117] "RemoveContainer" containerID="2dd33ae831e2ebbf191ae7de867cd87c0f16978810395e6eac0e32514b44b71b" Dec 04 18:39:15 crc kubenswrapper[4631]: E1204 18:39:15.496330 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2dd33ae831e2ebbf191ae7de867cd87c0f16978810395e6eac0e32514b44b71b\": container with ID starting with 2dd33ae831e2ebbf191ae7de867cd87c0f16978810395e6eac0e32514b44b71b not found: ID does not exist" containerID="2dd33ae831e2ebbf191ae7de867cd87c0f16978810395e6eac0e32514b44b71b" Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.496384 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2dd33ae831e2ebbf191ae7de867cd87c0f16978810395e6eac0e32514b44b71b"} err="failed to get container status \"2dd33ae831e2ebbf191ae7de867cd87c0f16978810395e6eac0e32514b44b71b\": rpc error: code = NotFound desc = could not find container \"2dd33ae831e2ebbf191ae7de867cd87c0f16978810395e6eac0e32514b44b71b\": container with ID starting with 2dd33ae831e2ebbf191ae7de867cd87c0f16978810395e6eac0e32514b44b71b not found: ID does not exist" Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.496412 4631 scope.go:117] "RemoveContainer" containerID="85e277c99add458d0b16498656fd7a9119aced885049615220a900b9f60cde0f" Dec 04 18:39:15 crc kubenswrapper[4631]: E1204 18:39:15.496856 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85e277c99add458d0b16498656fd7a9119aced885049615220a900b9f60cde0f\": container with ID starting with 85e277c99add458d0b16498656fd7a9119aced885049615220a900b9f60cde0f not found: ID does not exist" containerID="85e277c99add458d0b16498656fd7a9119aced885049615220a900b9f60cde0f" Dec 04 18:39:15 crc kubenswrapper[4631]: I1204 18:39:15.496895 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85e277c99add458d0b16498656fd7a9119aced885049615220a900b9f60cde0f"} err="failed to get container status \"85e277c99add458d0b16498656fd7a9119aced885049615220a900b9f60cde0f\": rpc error: code = NotFound desc = could not find container \"85e277c99add458d0b16498656fd7a9119aced885049615220a900b9f60cde0f\": container with ID starting with 85e277c99add458d0b16498656fd7a9119aced885049615220a900b9f60cde0f not found: ID does not exist" Dec 04 18:39:16 crc kubenswrapper[4631]: I1204 18:39:16.252184 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a87993dc-da81-4878-9276-763e630de846" 
path="/var/lib/kubelet/pods/a87993dc-da81-4878-9276-763e630de846/volumes" Dec 04 18:39:16 crc kubenswrapper[4631]: I1204 18:39:16.393061 4631 generic.go:334] "Generic (PLEG): container finished" podID="aaad54de-a5f3-452d-b552-56376be0739a" containerID="047716b357ef2073183c2b173b4627e76c07d0ae0c2c8c045306360b7a6d36dd" exitCode=0 Dec 04 18:39:16 crc kubenswrapper[4631]: I1204 18:39:16.393154 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xr4h" event={"ID":"aaad54de-a5f3-452d-b552-56376be0739a","Type":"ContainerDied","Data":"047716b357ef2073183c2b173b4627e76c07d0ae0c2c8c045306360b7a6d36dd"} Dec 04 18:39:16 crc kubenswrapper[4631]: I1204 18:39:16.393847 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xr4h" event={"ID":"aaad54de-a5f3-452d-b552-56376be0739a","Type":"ContainerDied","Data":"ead62e58a7132f979df2208c1c48a1cc314363af97dee34238a47f1c5795318f"} Dec 04 18:39:16 crc kubenswrapper[4631]: I1204 18:39:16.393867 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ead62e58a7132f979df2208c1c48a1cc314363af97dee34238a47f1c5795318f" Dec 04 18:39:16 crc kubenswrapper[4631]: I1204 18:39:16.415924 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9xr4h" Dec 04 18:39:16 crc kubenswrapper[4631]: I1204 18:39:16.469161 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6mqr\" (UniqueName: \"kubernetes.io/projected/aaad54de-a5f3-452d-b552-56376be0739a-kube-api-access-j6mqr\") pod \"aaad54de-a5f3-452d-b552-56376be0739a\" (UID: \"aaad54de-a5f3-452d-b552-56376be0739a\") " Dec 04 18:39:16 crc kubenswrapper[4631]: I1204 18:39:16.469264 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aaad54de-a5f3-452d-b552-56376be0739a-utilities\") pod \"aaad54de-a5f3-452d-b552-56376be0739a\" (UID: \"aaad54de-a5f3-452d-b552-56376be0739a\") " Dec 04 18:39:16 crc kubenswrapper[4631]: I1204 18:39:16.469474 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aaad54de-a5f3-452d-b552-56376be0739a-catalog-content\") pod \"aaad54de-a5f3-452d-b552-56376be0739a\" (UID: \"aaad54de-a5f3-452d-b552-56376be0739a\") " Dec 04 18:39:16 crc kubenswrapper[4631]: I1204 18:39:16.470478 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aaad54de-a5f3-452d-b552-56376be0739a-utilities" (OuterVolumeSpecName: "utilities") pod "aaad54de-a5f3-452d-b552-56376be0739a" (UID: "aaad54de-a5f3-452d-b552-56376be0739a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:39:16 crc kubenswrapper[4631]: I1204 18:39:16.532480 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aaad54de-a5f3-452d-b552-56376be0739a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aaad54de-a5f3-452d-b552-56376be0739a" (UID: "aaad54de-a5f3-452d-b552-56376be0739a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:39:16 crc kubenswrapper[4631]: I1204 18:39:16.572628 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aaad54de-a5f3-452d-b552-56376be0739a-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 18:39:16 crc kubenswrapper[4631]: I1204 18:39:16.572691 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aaad54de-a5f3-452d-b552-56376be0739a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 18:39:17 crc kubenswrapper[4631]: I1204 18:39:17.003356 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aaad54de-a5f3-452d-b552-56376be0739a-kube-api-access-j6mqr" (OuterVolumeSpecName: "kube-api-access-j6mqr") pod "aaad54de-a5f3-452d-b552-56376be0739a" (UID: "aaad54de-a5f3-452d-b552-56376be0739a"). InnerVolumeSpecName "kube-api-access-j6mqr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:39:17 crc kubenswrapper[4631]: I1204 18:39:17.083272 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6mqr\" (UniqueName: \"kubernetes.io/projected/aaad54de-a5f3-452d-b552-56376be0739a-kube-api-access-j6mqr\") on node \"crc\" DevicePath \"\"" Dec 04 18:39:17 crc kubenswrapper[4631]: I1204 18:39:17.400113 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9xr4h" Dec 04 18:39:17 crc kubenswrapper[4631]: I1204 18:39:17.444514 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9xr4h"] Dec 04 18:39:17 crc kubenswrapper[4631]: I1204 18:39:17.456253 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9xr4h"] Dec 04 18:39:18 crc kubenswrapper[4631]: I1204 18:39:18.259851 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aaad54de-a5f3-452d-b552-56376be0739a" path="/var/lib/kubelet/pods/aaad54de-a5f3-452d-b552-56376be0739a/volumes" Dec 04 18:39:36 crc kubenswrapper[4631]: I1204 18:39:36.023007 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:39:36 crc kubenswrapper[4631]: I1204 18:39:36.023617 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 18:40:04 crc kubenswrapper[4631]: I1204 18:40:04.803079 4631 generic.go:334] "Generic (PLEG): container finished" podID="36e446e6-248d-4a69-80f1-585a9bfcd4cf" containerID="f39fba016f1c04bf310ee5b23eb7500a5267e84cf36d997b427307f67827a4f4" exitCode=0 Dec 04 18:40:04 crc kubenswrapper[4631]: I1204 18:40:04.803102 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"36e446e6-248d-4a69-80f1-585a9bfcd4cf","Type":"ContainerDied","Data":"f39fba016f1c04bf310ee5b23eb7500a5267e84cf36d997b427307f67827a4f4"} Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.022489 4631 patch_prober.go:28] interesting 
pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.022798 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.022838 4631 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.023537 4631 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef"} pod="openshift-machine-config-operator/machine-config-daemon-q27wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.023604 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" containerID="cri-o://f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" gracePeriod=600 Dec 04 18:40:06 crc kubenswrapper[4631]: E1204 18:40:06.153551 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.201566 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.226896 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/36e446e6-248d-4a69-80f1-585a9bfcd4cf-openstack-config-secret\") pod \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.226997 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/36e446e6-248d-4a69-80f1-585a9bfcd4cf-ssh-key\") pod \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.227058 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-phhs8\" (UniqueName: \"kubernetes.io/projected/36e446e6-248d-4a69-80f1-585a9bfcd4cf-kube-api-access-phhs8\") pod \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.227091 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/36e446e6-248d-4a69-80f1-585a9bfcd4cf-test-operator-ephemeral-workdir\") pod \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.227125 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/36e446e6-248d-4a69-80f1-585a9bfcd4cf-config-data\") pod \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.227198 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/36e446e6-248d-4a69-80f1-585a9bfcd4cf-test-operator-ephemeral-temporary\") pod \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.227271 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/36e446e6-248d-4a69-80f1-585a9bfcd4cf-openstack-config\") pod \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.227329 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/36e446e6-248d-4a69-80f1-585a9bfcd4cf-ca-certs\") pod \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.227357 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\" (UID: \"36e446e6-248d-4a69-80f1-585a9bfcd4cf\") " Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.228205 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36e446e6-248d-4a69-80f1-585a9bfcd4cf-test-operator-ephemeral-temporary" (OuterVolumeSpecName: 
"test-operator-ephemeral-temporary") pod "36e446e6-248d-4a69-80f1-585a9bfcd4cf" (UID: "36e446e6-248d-4a69-80f1-585a9bfcd4cf"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.229846 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/36e446e6-248d-4a69-80f1-585a9bfcd4cf-config-data" (OuterVolumeSpecName: "config-data") pod "36e446e6-248d-4a69-80f1-585a9bfcd4cf" (UID: "36e446e6-248d-4a69-80f1-585a9bfcd4cf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.234011 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36e446e6-248d-4a69-80f1-585a9bfcd4cf-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "36e446e6-248d-4a69-80f1-585a9bfcd4cf" (UID: "36e446e6-248d-4a69-80f1-585a9bfcd4cf"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.236975 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "test-operator-logs") pod "36e446e6-248d-4a69-80f1-585a9bfcd4cf" (UID: "36e446e6-248d-4a69-80f1-585a9bfcd4cf"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.250540 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36e446e6-248d-4a69-80f1-585a9bfcd4cf-kube-api-access-phhs8" (OuterVolumeSpecName: "kube-api-access-phhs8") pod "36e446e6-248d-4a69-80f1-585a9bfcd4cf" (UID: "36e446e6-248d-4a69-80f1-585a9bfcd4cf"). InnerVolumeSpecName "kube-api-access-phhs8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.265310 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36e446e6-248d-4a69-80f1-585a9bfcd4cf-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "36e446e6-248d-4a69-80f1-585a9bfcd4cf" (UID: "36e446e6-248d-4a69-80f1-585a9bfcd4cf"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.286897 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36e446e6-248d-4a69-80f1-585a9bfcd4cf-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "36e446e6-248d-4a69-80f1-585a9bfcd4cf" (UID: "36e446e6-248d-4a69-80f1-585a9bfcd4cf"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.288445 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36e446e6-248d-4a69-80f1-585a9bfcd4cf-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "36e446e6-248d-4a69-80f1-585a9bfcd4cf" (UID: "36e446e6-248d-4a69-80f1-585a9bfcd4cf"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.305258 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/36e446e6-248d-4a69-80f1-585a9bfcd4cf-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "36e446e6-248d-4a69-80f1-585a9bfcd4cf" (UID: "36e446e6-248d-4a69-80f1-585a9bfcd4cf"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.330462 4631 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/36e446e6-248d-4a69-80f1-585a9bfcd4cf-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.331624 4631 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.333924 4631 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/36e446e6-248d-4a69-80f1-585a9bfcd4cf-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.334116 4631 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/36e446e6-248d-4a69-80f1-585a9bfcd4cf-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.334216 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-phhs8\" (UniqueName: \"kubernetes.io/projected/36e446e6-248d-4a69-80f1-585a9bfcd4cf-kube-api-access-phhs8\") on node \"crc\" DevicePath \"\"" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.334310 4631 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/36e446e6-248d-4a69-80f1-585a9bfcd4cf-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.334495 4631 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/36e446e6-248d-4a69-80f1-585a9bfcd4cf-config-data\") on node \"crc\" DevicePath \"\"" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.334585 4631 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/36e446e6-248d-4a69-80f1-585a9bfcd4cf-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.334656 4631 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/36e446e6-248d-4a69-80f1-585a9bfcd4cf-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.352969 4631 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.436731 4631 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.822307 4631 generic.go:334] "Generic (PLEG): container 
finished" podID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" exitCode=0 Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.822400 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerDied","Data":"f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef"} Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.822456 4631 scope.go:117] "RemoveContainer" containerID="a08abbadd1666d41c778d93c9168f1cedea0ac0fd4eecfadace71ecc85537949" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.823265 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:40:06 crc kubenswrapper[4631]: E1204 18:40:06.823567 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.824627 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"36e446e6-248d-4a69-80f1-585a9bfcd4cf","Type":"ContainerDied","Data":"c372aac970b065751e0adce7b6a5fbe3583a27c496d286a0e17f781147054888"} Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.824649 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c372aac970b065751e0adce7b6a5fbe3583a27c496d286a0e17f781147054888" Dec 04 18:40:06 crc kubenswrapper[4631]: I1204 18:40:06.824698 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.184646 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 04 18:40:15 crc kubenswrapper[4631]: E1204 18:40:15.185732 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a87993dc-da81-4878-9276-763e630de846" containerName="extract-utilities" Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.185749 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a87993dc-da81-4878-9276-763e630de846" containerName="extract-utilities" Dec 04 18:40:15 crc kubenswrapper[4631]: E1204 18:40:15.185774 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a87993dc-da81-4878-9276-763e630de846" containerName="registry-server" Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.185782 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a87993dc-da81-4878-9276-763e630de846" containerName="registry-server" Dec 04 18:40:15 crc kubenswrapper[4631]: E1204 18:40:15.185811 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaad54de-a5f3-452d-b552-56376be0739a" containerName="registry-server" Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.185820 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaad54de-a5f3-452d-b552-56376be0739a" containerName="registry-server" Dec 04 18:40:15 crc kubenswrapper[4631]: E1204 18:40:15.185835 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36e446e6-248d-4a69-80f1-585a9bfcd4cf" containerName="tempest-tests-tempest-tests-runner" Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.185844 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="36e446e6-248d-4a69-80f1-585a9bfcd4cf" containerName="tempest-tests-tempest-tests-runner" Dec 04 18:40:15 crc kubenswrapper[4631]: E1204 18:40:15.185856 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaad54de-a5f3-452d-b552-56376be0739a" containerName="extract-content" Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.185864 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaad54de-a5f3-452d-b552-56376be0739a" containerName="extract-content" Dec 04 18:40:15 crc kubenswrapper[4631]: E1204 18:40:15.185884 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a87993dc-da81-4878-9276-763e630de846" containerName="extract-content" Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.185893 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a87993dc-da81-4878-9276-763e630de846" containerName="extract-content" Dec 04 18:40:15 crc kubenswrapper[4631]: E1204 18:40:15.185903 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaad54de-a5f3-452d-b552-56376be0739a" containerName="extract-utilities" Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.185910 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaad54de-a5f3-452d-b552-56376be0739a" containerName="extract-utilities" Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.186115 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="36e446e6-248d-4a69-80f1-585a9bfcd4cf" containerName="tempest-tests-tempest-tests-runner" Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.186130 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="a87993dc-da81-4878-9276-763e630de846" containerName="registry-server" Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 
18:40:15.186148 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="aaad54de-a5f3-452d-b552-56376be0739a" containerName="registry-server" Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.186873 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.189771 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-vnbgz" Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.207595 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.231238 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cc75bbe8-1619-48a6-8dd8-4353f50fac82\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.231293 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-db4lm\" (UniqueName: \"kubernetes.io/projected/cc75bbe8-1619-48a6-8dd8-4353f50fac82-kube-api-access-db4lm\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cc75bbe8-1619-48a6-8dd8-4353f50fac82\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.333903 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cc75bbe8-1619-48a6-8dd8-4353f50fac82\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.333954 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-db4lm\" (UniqueName: \"kubernetes.io/projected/cc75bbe8-1619-48a6-8dd8-4353f50fac82-kube-api-access-db4lm\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cc75bbe8-1619-48a6-8dd8-4353f50fac82\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.336918 4631 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cc75bbe8-1619-48a6-8dd8-4353f50fac82\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.360661 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-db4lm\" (UniqueName: \"kubernetes.io/projected/cc75bbe8-1619-48a6-8dd8-4353f50fac82-kube-api-access-db4lm\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cc75bbe8-1619-48a6-8dd8-4353f50fac82\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.379142 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cc75bbe8-1619-48a6-8dd8-4353f50fac82\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.518978 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.975267 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 04 18:40:15 crc kubenswrapper[4631]: I1204 18:40:15.986885 4631 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 18:40:16 crc kubenswrapper[4631]: I1204 18:40:16.913888 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"cc75bbe8-1619-48a6-8dd8-4353f50fac82","Type":"ContainerStarted","Data":"22cbaf386598cf8215d1aa3ec07cc2ec53e03f4d97eabb0d0d95bb4cbeb417e4"} Dec 04 18:40:17 crc kubenswrapper[4631]: I1204 18:40:17.926590 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"cc75bbe8-1619-48a6-8dd8-4353f50fac82","Type":"ContainerStarted","Data":"d5b95aaa9c0180aef33d573309342677e92d50d7f03ad82e1a1b8dd9bdc80aa7"} Dec 04 18:40:17 crc kubenswrapper[4631]: I1204 18:40:17.950039 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.212872995 podStartE2EDuration="2.950020151s" podCreationTimestamp="2025-12-04 18:40:15 +0000 UTC" firstStartedPulling="2025-12-04 18:40:15.986700189 +0000 UTC m=+4346.018942177" lastFinishedPulling="2025-12-04 18:40:16.723847335 +0000 UTC m=+4346.756089333" observedRunningTime="2025-12-04 18:40:17.942500287 +0000 UTC m=+4347.974742305" watchObservedRunningTime="2025-12-04 18:40:17.950020151 +0000 UTC m=+4347.982262159" Dec 04 18:40:18 crc kubenswrapper[4631]: I1204 18:40:18.240331 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:40:18 crc kubenswrapper[4631]: E1204 18:40:18.240721 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:40:29 crc kubenswrapper[4631]: I1204 18:40:29.239147 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:40:29 crc kubenswrapper[4631]: E1204 18:40:29.239899 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:40:40 crc kubenswrapper[4631]: I1204 18:40:40.246608 4631 scope.go:117] 
"RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:40:40 crc kubenswrapper[4631]: E1204 18:40:40.247365 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:40:42 crc kubenswrapper[4631]: I1204 18:40:42.615308 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-fmgjn/must-gather-86p5p"] Dec 04 18:40:42 crc kubenswrapper[4631]: I1204 18:40:42.623114 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-fmgjn/must-gather-86p5p" Dec 04 18:40:42 crc kubenswrapper[4631]: I1204 18:40:42.629969 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-fmgjn"/"kube-root-ca.crt" Dec 04 18:40:42 crc kubenswrapper[4631]: I1204 18:40:42.630667 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-fmgjn"/"openshift-service-ca.crt" Dec 04 18:40:42 crc kubenswrapper[4631]: I1204 18:40:42.630837 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-fmgjn"/"default-dockercfg-pgjhs" Dec 04 18:40:42 crc kubenswrapper[4631]: I1204 18:40:42.638384 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-fmgjn/must-gather-86p5p"] Dec 04 18:40:42 crc kubenswrapper[4631]: I1204 18:40:42.798071 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-798ck\" (UniqueName: \"kubernetes.io/projected/610e5420-ccf7-4deb-a457-c8fe0a7f3e0c-kube-api-access-798ck\") pod \"must-gather-86p5p\" (UID: \"610e5420-ccf7-4deb-a457-c8fe0a7f3e0c\") " pod="openshift-must-gather-fmgjn/must-gather-86p5p" Dec 04 18:40:42 crc kubenswrapper[4631]: I1204 18:40:42.798154 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/610e5420-ccf7-4deb-a457-c8fe0a7f3e0c-must-gather-output\") pod \"must-gather-86p5p\" (UID: \"610e5420-ccf7-4deb-a457-c8fe0a7f3e0c\") " pod="openshift-must-gather-fmgjn/must-gather-86p5p" Dec 04 18:40:42 crc kubenswrapper[4631]: I1204 18:40:42.900179 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-798ck\" (UniqueName: \"kubernetes.io/projected/610e5420-ccf7-4deb-a457-c8fe0a7f3e0c-kube-api-access-798ck\") pod \"must-gather-86p5p\" (UID: \"610e5420-ccf7-4deb-a457-c8fe0a7f3e0c\") " pod="openshift-must-gather-fmgjn/must-gather-86p5p" Dec 04 18:40:42 crc kubenswrapper[4631]: I1204 18:40:42.900264 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/610e5420-ccf7-4deb-a457-c8fe0a7f3e0c-must-gather-output\") pod \"must-gather-86p5p\" (UID: \"610e5420-ccf7-4deb-a457-c8fe0a7f3e0c\") " pod="openshift-must-gather-fmgjn/must-gather-86p5p" Dec 04 18:40:42 crc kubenswrapper[4631]: I1204 18:40:42.900737 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: 
\"kubernetes.io/empty-dir/610e5420-ccf7-4deb-a457-c8fe0a7f3e0c-must-gather-output\") pod \"must-gather-86p5p\" (UID: \"610e5420-ccf7-4deb-a457-c8fe0a7f3e0c\") " pod="openshift-must-gather-fmgjn/must-gather-86p5p" Dec 04 18:40:42 crc kubenswrapper[4631]: I1204 18:40:42.918741 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-798ck\" (UniqueName: \"kubernetes.io/projected/610e5420-ccf7-4deb-a457-c8fe0a7f3e0c-kube-api-access-798ck\") pod \"must-gather-86p5p\" (UID: \"610e5420-ccf7-4deb-a457-c8fe0a7f3e0c\") " pod="openshift-must-gather-fmgjn/must-gather-86p5p" Dec 04 18:40:42 crc kubenswrapper[4631]: I1204 18:40:42.942911 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-fmgjn/must-gather-86p5p" Dec 04 18:40:43 crc kubenswrapper[4631]: I1204 18:40:43.498341 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-fmgjn/must-gather-86p5p"] Dec 04 18:40:44 crc kubenswrapper[4631]: I1204 18:40:44.285698 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fmgjn/must-gather-86p5p" event={"ID":"610e5420-ccf7-4deb-a457-c8fe0a7f3e0c","Type":"ContainerStarted","Data":"14ca2ee5b9410411025adca9cf275a1bf85801e61800dfc9347e2768018013bc"} Dec 04 18:40:49 crc kubenswrapper[4631]: I1204 18:40:49.347430 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fmgjn/must-gather-86p5p" event={"ID":"610e5420-ccf7-4deb-a457-c8fe0a7f3e0c","Type":"ContainerStarted","Data":"f35bc398fc7bdc912e75d0398e79cb32d29aa6d7cb2c800a96149f18b8dc32e6"} Dec 04 18:40:50 crc kubenswrapper[4631]: I1204 18:40:50.357957 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fmgjn/must-gather-86p5p" event={"ID":"610e5420-ccf7-4deb-a457-c8fe0a7f3e0c","Type":"ContainerStarted","Data":"95c90911e38677c65e405379a8777221c1ff4ba1dc300ff57c9c7e6bb6f60111"} Dec 04 18:40:50 crc kubenswrapper[4631]: I1204 18:40:50.386076 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-fmgjn/must-gather-86p5p" podStartSLOduration=3.019430859 podStartE2EDuration="8.386052701s" podCreationTimestamp="2025-12-04 18:40:42 +0000 UTC" firstStartedPulling="2025-12-04 18:40:43.515457452 +0000 UTC m=+4373.547699450" lastFinishedPulling="2025-12-04 18:40:48.882079294 +0000 UTC m=+4378.914321292" observedRunningTime="2025-12-04 18:40:50.381126421 +0000 UTC m=+4380.413368429" watchObservedRunningTime="2025-12-04 18:40:50.386052701 +0000 UTC m=+4380.418294729" Dec 04 18:40:51 crc kubenswrapper[4631]: I1204 18:40:51.239212 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:40:51 crc kubenswrapper[4631]: E1204 18:40:51.239616 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:40:53 crc kubenswrapper[4631]: E1204 18:40:53.177282 4631 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.194:51252->38.102.83.194:39691: read tcp 38.102.83.194:51252->38.102.83.194:39691: read: connection reset by peer Dec 04 18:40:54 crc kubenswrapper[4631]: I1204 
18:40:54.122278 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-fmgjn/crc-debug-bsdpw"] Dec 04 18:40:54 crc kubenswrapper[4631]: I1204 18:40:54.123721 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-fmgjn/crc-debug-bsdpw" Dec 04 18:40:54 crc kubenswrapper[4631]: I1204 18:40:54.226059 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e-host\") pod \"crc-debug-bsdpw\" (UID: \"0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e\") " pod="openshift-must-gather-fmgjn/crc-debug-bsdpw" Dec 04 18:40:54 crc kubenswrapper[4631]: I1204 18:40:54.226768 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8x8cv\" (UniqueName: \"kubernetes.io/projected/0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e-kube-api-access-8x8cv\") pod \"crc-debug-bsdpw\" (UID: \"0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e\") " pod="openshift-must-gather-fmgjn/crc-debug-bsdpw" Dec 04 18:40:54 crc kubenswrapper[4631]: I1204 18:40:54.328288 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e-host\") pod \"crc-debug-bsdpw\" (UID: \"0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e\") " pod="openshift-must-gather-fmgjn/crc-debug-bsdpw" Dec 04 18:40:54 crc kubenswrapper[4631]: I1204 18:40:54.328484 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e-host\") pod \"crc-debug-bsdpw\" (UID: \"0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e\") " pod="openshift-must-gather-fmgjn/crc-debug-bsdpw" Dec 04 18:40:54 crc kubenswrapper[4631]: I1204 18:40:54.328743 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8x8cv\" (UniqueName: \"kubernetes.io/projected/0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e-kube-api-access-8x8cv\") pod \"crc-debug-bsdpw\" (UID: \"0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e\") " pod="openshift-must-gather-fmgjn/crc-debug-bsdpw" Dec 04 18:40:54 crc kubenswrapper[4631]: I1204 18:40:54.509131 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8x8cv\" (UniqueName: \"kubernetes.io/projected/0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e-kube-api-access-8x8cv\") pod \"crc-debug-bsdpw\" (UID: \"0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e\") " pod="openshift-must-gather-fmgjn/crc-debug-bsdpw" Dec 04 18:40:54 crc kubenswrapper[4631]: I1204 18:40:54.740615 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-fmgjn/crc-debug-bsdpw" Dec 04 18:40:55 crc kubenswrapper[4631]: I1204 18:40:55.402121 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fmgjn/crc-debug-bsdpw" event={"ID":"0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e","Type":"ContainerStarted","Data":"3165cbaa51c00338f279ab2e4c0408e8b048bec7c4126915e428e92dcf5a70fe"} Dec 04 18:41:04 crc kubenswrapper[4631]: I1204 18:41:04.241732 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:41:04 crc kubenswrapper[4631]: E1204 18:41:04.242469 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:41:07 crc kubenswrapper[4631]: I1204 18:41:07.523561 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fmgjn/crc-debug-bsdpw" event={"ID":"0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e","Type":"ContainerStarted","Data":"7c9d9d00db47bbde1db8842c81f8bf5d1cfb49ed2a942f2a254b8926bbd6b881"} Dec 04 18:41:15 crc kubenswrapper[4631]: I1204 18:41:15.239050 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:41:15 crc kubenswrapper[4631]: E1204 18:41:15.239767 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:41:27 crc kubenswrapper[4631]: I1204 18:41:27.239016 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:41:27 crc kubenswrapper[4631]: E1204 18:41:27.239706 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:41:38 crc kubenswrapper[4631]: I1204 18:41:38.240488 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:41:38 crc kubenswrapper[4631]: E1204 18:41:38.241403 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:41:51 crc kubenswrapper[4631]: I1204 18:41:51.239568 4631 scope.go:117] "RemoveContainer" 
containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:41:51 crc kubenswrapper[4631]: E1204 18:41:51.240492 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:42:00 crc kubenswrapper[4631]: I1204 18:42:00.987257 4631 generic.go:334] "Generic (PLEG): container finished" podID="0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e" containerID="7c9d9d00db47bbde1db8842c81f8bf5d1cfb49ed2a942f2a254b8926bbd6b881" exitCode=0 Dec 04 18:42:00 crc kubenswrapper[4631]: I1204 18:42:00.987330 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fmgjn/crc-debug-bsdpw" event={"ID":"0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e","Type":"ContainerDied","Data":"7c9d9d00db47bbde1db8842c81f8bf5d1cfb49ed2a942f2a254b8926bbd6b881"} Dec 04 18:42:02 crc kubenswrapper[4631]: I1204 18:42:02.144992 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-fmgjn/crc-debug-bsdpw" Dec 04 18:42:02 crc kubenswrapper[4631]: I1204 18:42:02.182015 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-fmgjn/crc-debug-bsdpw"] Dec 04 18:42:02 crc kubenswrapper[4631]: I1204 18:42:02.196358 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-fmgjn/crc-debug-bsdpw"] Dec 04 18:42:02 crc kubenswrapper[4631]: I1204 18:42:02.262261 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e-host\") pod \"0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e\" (UID: \"0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e\") " Dec 04 18:42:02 crc kubenswrapper[4631]: I1204 18:42:02.262386 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e-host" (OuterVolumeSpecName: "host") pod "0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e" (UID: "0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 18:42:02 crc kubenswrapper[4631]: I1204 18:42:02.262401 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8x8cv\" (UniqueName: \"kubernetes.io/projected/0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e-kube-api-access-8x8cv\") pod \"0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e\" (UID: \"0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e\") " Dec 04 18:42:02 crc kubenswrapper[4631]: I1204 18:42:02.263240 4631 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e-host\") on node \"crc\" DevicePath \"\"" Dec 04 18:42:02 crc kubenswrapper[4631]: I1204 18:42:02.268132 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e-kube-api-access-8x8cv" (OuterVolumeSpecName: "kube-api-access-8x8cv") pod "0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e" (UID: "0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e"). InnerVolumeSpecName "kube-api-access-8x8cv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:42:02 crc kubenswrapper[4631]: I1204 18:42:02.364943 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8x8cv\" (UniqueName: \"kubernetes.io/projected/0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e-kube-api-access-8x8cv\") on node \"crc\" DevicePath \"\"" Dec 04 18:42:03 crc kubenswrapper[4631]: I1204 18:42:03.004479 4631 scope.go:117] "RemoveContainer" containerID="7c9d9d00db47bbde1db8842c81f8bf5d1cfb49ed2a942f2a254b8926bbd6b881" Dec 04 18:42:03 crc kubenswrapper[4631]: I1204 18:42:03.004521 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-fmgjn/crc-debug-bsdpw" Dec 04 18:42:03 crc kubenswrapper[4631]: I1204 18:42:03.240250 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:42:03 crc kubenswrapper[4631]: E1204 18:42:03.240551 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:42:03 crc kubenswrapper[4631]: I1204 18:42:03.402698 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-fmgjn/crc-debug-x462q"] Dec 04 18:42:03 crc kubenswrapper[4631]: E1204 18:42:03.403331 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e" containerName="container-00" Dec 04 18:42:03 crc kubenswrapper[4631]: I1204 18:42:03.403348 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e" containerName="container-00" Dec 04 18:42:03 crc kubenswrapper[4631]: I1204 18:42:03.403528 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e" containerName="container-00" Dec 04 18:42:03 crc kubenswrapper[4631]: I1204 18:42:03.404113 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-fmgjn/crc-debug-x462q" Dec 04 18:42:03 crc kubenswrapper[4631]: I1204 18:42:03.483627 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a6b4d62a-0e16-4622-b5a5-6ae6466dea00-host\") pod \"crc-debug-x462q\" (UID: \"a6b4d62a-0e16-4622-b5a5-6ae6466dea00\") " pod="openshift-must-gather-fmgjn/crc-debug-x462q" Dec 04 18:42:03 crc kubenswrapper[4631]: I1204 18:42:03.483676 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmvsf\" (UniqueName: \"kubernetes.io/projected/a6b4d62a-0e16-4622-b5a5-6ae6466dea00-kube-api-access-tmvsf\") pod \"crc-debug-x462q\" (UID: \"a6b4d62a-0e16-4622-b5a5-6ae6466dea00\") " pod="openshift-must-gather-fmgjn/crc-debug-x462q" Dec 04 18:42:03 crc kubenswrapper[4631]: I1204 18:42:03.585688 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a6b4d62a-0e16-4622-b5a5-6ae6466dea00-host\") pod \"crc-debug-x462q\" (UID: \"a6b4d62a-0e16-4622-b5a5-6ae6466dea00\") " pod="openshift-must-gather-fmgjn/crc-debug-x462q" Dec 04 18:42:03 crc kubenswrapper[4631]: I1204 18:42:03.585739 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmvsf\" (UniqueName: \"kubernetes.io/projected/a6b4d62a-0e16-4622-b5a5-6ae6466dea00-kube-api-access-tmvsf\") pod \"crc-debug-x462q\" (UID: \"a6b4d62a-0e16-4622-b5a5-6ae6466dea00\") " pod="openshift-must-gather-fmgjn/crc-debug-x462q" Dec 04 18:42:03 crc kubenswrapper[4631]: I1204 18:42:03.585805 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a6b4d62a-0e16-4622-b5a5-6ae6466dea00-host\") pod \"crc-debug-x462q\" (UID: \"a6b4d62a-0e16-4622-b5a5-6ae6466dea00\") " pod="openshift-must-gather-fmgjn/crc-debug-x462q" Dec 04 18:42:03 crc kubenswrapper[4631]: I1204 18:42:03.604080 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmvsf\" (UniqueName: \"kubernetes.io/projected/a6b4d62a-0e16-4622-b5a5-6ae6466dea00-kube-api-access-tmvsf\") pod \"crc-debug-x462q\" (UID: \"a6b4d62a-0e16-4622-b5a5-6ae6466dea00\") " pod="openshift-must-gather-fmgjn/crc-debug-x462q" Dec 04 18:42:03 crc kubenswrapper[4631]: I1204 18:42:03.721731 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-fmgjn/crc-debug-x462q" Dec 04 18:42:04 crc kubenswrapper[4631]: I1204 18:42:04.013243 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fmgjn/crc-debug-x462q" event={"ID":"a6b4d62a-0e16-4622-b5a5-6ae6466dea00","Type":"ContainerStarted","Data":"3a087177b7120bb79828419a7a41b37158cfbfdf8c1f9f92a9b34d1a2ec21659"} Dec 04 18:42:04 crc kubenswrapper[4631]: I1204 18:42:04.013294 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fmgjn/crc-debug-x462q" event={"ID":"a6b4d62a-0e16-4622-b5a5-6ae6466dea00","Type":"ContainerStarted","Data":"2b2db1ea2609584f282afcf90f5596ec621835c415f3f80cf8a9e1cb1a4dbcc2"} Dec 04 18:42:04 crc kubenswrapper[4631]: I1204 18:42:04.026977 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-fmgjn/crc-debug-x462q" podStartSLOduration=1.026960843 podStartE2EDuration="1.026960843s" podCreationTimestamp="2025-12-04 18:42:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 18:42:04.025996666 +0000 UTC m=+4454.058238664" watchObservedRunningTime="2025-12-04 18:42:04.026960843 +0000 UTC m=+4454.059202841" Dec 04 18:42:04 crc kubenswrapper[4631]: I1204 18:42:04.248577 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e" path="/var/lib/kubelet/pods/0587dff8-67c4-498f-8ce3-2f9fdb9e4d5e/volumes" Dec 04 18:42:05 crc kubenswrapper[4631]: I1204 18:42:05.043621 4631 generic.go:334] "Generic (PLEG): container finished" podID="a6b4d62a-0e16-4622-b5a5-6ae6466dea00" containerID="3a087177b7120bb79828419a7a41b37158cfbfdf8c1f9f92a9b34d1a2ec21659" exitCode=0 Dec 04 18:42:05 crc kubenswrapper[4631]: I1204 18:42:05.043680 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fmgjn/crc-debug-x462q" event={"ID":"a6b4d62a-0e16-4622-b5a5-6ae6466dea00","Type":"ContainerDied","Data":"3a087177b7120bb79828419a7a41b37158cfbfdf8c1f9f92a9b34d1a2ec21659"} Dec 04 18:42:06 crc kubenswrapper[4631]: I1204 18:42:06.150262 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-fmgjn/crc-debug-x462q" Dec 04 18:42:06 crc kubenswrapper[4631]: I1204 18:42:06.181592 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-fmgjn/crc-debug-x462q"] Dec 04 18:42:06 crc kubenswrapper[4631]: I1204 18:42:06.189578 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-fmgjn/crc-debug-x462q"] Dec 04 18:42:06 crc kubenswrapper[4631]: I1204 18:42:06.228733 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a6b4d62a-0e16-4622-b5a5-6ae6466dea00-host\") pod \"a6b4d62a-0e16-4622-b5a5-6ae6466dea00\" (UID: \"a6b4d62a-0e16-4622-b5a5-6ae6466dea00\") " Dec 04 18:42:06 crc kubenswrapper[4631]: I1204 18:42:06.228850 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tmvsf\" (UniqueName: \"kubernetes.io/projected/a6b4d62a-0e16-4622-b5a5-6ae6466dea00-kube-api-access-tmvsf\") pod \"a6b4d62a-0e16-4622-b5a5-6ae6466dea00\" (UID: \"a6b4d62a-0e16-4622-b5a5-6ae6466dea00\") " Dec 04 18:42:06 crc kubenswrapper[4631]: I1204 18:42:06.228876 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6b4d62a-0e16-4622-b5a5-6ae6466dea00-host" (OuterVolumeSpecName: "host") pod "a6b4d62a-0e16-4622-b5a5-6ae6466dea00" (UID: "a6b4d62a-0e16-4622-b5a5-6ae6466dea00"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 18:42:06 crc kubenswrapper[4631]: I1204 18:42:06.229225 4631 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a6b4d62a-0e16-4622-b5a5-6ae6466dea00-host\") on node \"crc\" DevicePath \"\"" Dec 04 18:42:06 crc kubenswrapper[4631]: I1204 18:42:06.238609 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6b4d62a-0e16-4622-b5a5-6ae6466dea00-kube-api-access-tmvsf" (OuterVolumeSpecName: "kube-api-access-tmvsf") pod "a6b4d62a-0e16-4622-b5a5-6ae6466dea00" (UID: "a6b4d62a-0e16-4622-b5a5-6ae6466dea00"). InnerVolumeSpecName "kube-api-access-tmvsf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:42:06 crc kubenswrapper[4631]: I1204 18:42:06.269649 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6b4d62a-0e16-4622-b5a5-6ae6466dea00" path="/var/lib/kubelet/pods/a6b4d62a-0e16-4622-b5a5-6ae6466dea00/volumes" Dec 04 18:42:06 crc kubenswrapper[4631]: I1204 18:42:06.330700 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tmvsf\" (UniqueName: \"kubernetes.io/projected/a6b4d62a-0e16-4622-b5a5-6ae6466dea00-kube-api-access-tmvsf\") on node \"crc\" DevicePath \"\"" Dec 04 18:42:07 crc kubenswrapper[4631]: I1204 18:42:07.063714 4631 scope.go:117] "RemoveContainer" containerID="3a087177b7120bb79828419a7a41b37158cfbfdf8c1f9f92a9b34d1a2ec21659" Dec 04 18:42:07 crc kubenswrapper[4631]: I1204 18:42:07.063906 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-fmgjn/crc-debug-x462q" Dec 04 18:42:07 crc kubenswrapper[4631]: I1204 18:42:07.387571 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-fmgjn/crc-debug-tt4vc"] Dec 04 18:42:07 crc kubenswrapper[4631]: E1204 18:42:07.388010 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6b4d62a-0e16-4622-b5a5-6ae6466dea00" containerName="container-00" Dec 04 18:42:07 crc kubenswrapper[4631]: I1204 18:42:07.388023 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6b4d62a-0e16-4622-b5a5-6ae6466dea00" containerName="container-00" Dec 04 18:42:07 crc kubenswrapper[4631]: I1204 18:42:07.388199 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6b4d62a-0e16-4622-b5a5-6ae6466dea00" containerName="container-00" Dec 04 18:42:07 crc kubenswrapper[4631]: I1204 18:42:07.388800 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-fmgjn/crc-debug-tt4vc" Dec 04 18:42:07 crc kubenswrapper[4631]: I1204 18:42:07.451460 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bd01e760-fb82-4bd1-a675-e2e176a534c2-host\") pod \"crc-debug-tt4vc\" (UID: \"bd01e760-fb82-4bd1-a675-e2e176a534c2\") " pod="openshift-must-gather-fmgjn/crc-debug-tt4vc" Dec 04 18:42:07 crc kubenswrapper[4631]: I1204 18:42:07.451543 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67m5c\" (UniqueName: \"kubernetes.io/projected/bd01e760-fb82-4bd1-a675-e2e176a534c2-kube-api-access-67m5c\") pod \"crc-debug-tt4vc\" (UID: \"bd01e760-fb82-4bd1-a675-e2e176a534c2\") " pod="openshift-must-gather-fmgjn/crc-debug-tt4vc" Dec 04 18:42:07 crc kubenswrapper[4631]: I1204 18:42:07.553428 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bd01e760-fb82-4bd1-a675-e2e176a534c2-host\") pod \"crc-debug-tt4vc\" (UID: \"bd01e760-fb82-4bd1-a675-e2e176a534c2\") " pod="openshift-must-gather-fmgjn/crc-debug-tt4vc" Dec 04 18:42:07 crc kubenswrapper[4631]: I1204 18:42:07.553732 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67m5c\" (UniqueName: \"kubernetes.io/projected/bd01e760-fb82-4bd1-a675-e2e176a534c2-kube-api-access-67m5c\") pod \"crc-debug-tt4vc\" (UID: \"bd01e760-fb82-4bd1-a675-e2e176a534c2\") " pod="openshift-must-gather-fmgjn/crc-debug-tt4vc" Dec 04 18:42:07 crc kubenswrapper[4631]: I1204 18:42:07.553531 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bd01e760-fb82-4bd1-a675-e2e176a534c2-host\") pod \"crc-debug-tt4vc\" (UID: \"bd01e760-fb82-4bd1-a675-e2e176a534c2\") " pod="openshift-must-gather-fmgjn/crc-debug-tt4vc" Dec 04 18:42:07 crc kubenswrapper[4631]: I1204 18:42:07.583418 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67m5c\" (UniqueName: \"kubernetes.io/projected/bd01e760-fb82-4bd1-a675-e2e176a534c2-kube-api-access-67m5c\") pod \"crc-debug-tt4vc\" (UID: \"bd01e760-fb82-4bd1-a675-e2e176a534c2\") " pod="openshift-must-gather-fmgjn/crc-debug-tt4vc" Dec 04 18:42:07 crc kubenswrapper[4631]: I1204 18:42:07.704219 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-fmgjn/crc-debug-tt4vc" Dec 04 18:42:08 crc kubenswrapper[4631]: I1204 18:42:08.075855 4631 generic.go:334] "Generic (PLEG): container finished" podID="bd01e760-fb82-4bd1-a675-e2e176a534c2" containerID="79e396cf095a20cc078a52831509efef92d0f8c53fd871d6d8973bfa3d4e7afb" exitCode=0 Dec 04 18:42:08 crc kubenswrapper[4631]: I1204 18:42:08.075927 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fmgjn/crc-debug-tt4vc" event={"ID":"bd01e760-fb82-4bd1-a675-e2e176a534c2","Type":"ContainerDied","Data":"79e396cf095a20cc078a52831509efef92d0f8c53fd871d6d8973bfa3d4e7afb"} Dec 04 18:42:08 crc kubenswrapper[4631]: I1204 18:42:08.076221 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fmgjn/crc-debug-tt4vc" event={"ID":"bd01e760-fb82-4bd1-a675-e2e176a534c2","Type":"ContainerStarted","Data":"7a4718f2ef41eb6473b1d595d16a49a56064d36af67c185fbb2ac0f2b498c3bb"} Dec 04 18:42:08 crc kubenswrapper[4631]: I1204 18:42:08.119606 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-fmgjn/crc-debug-tt4vc"] Dec 04 18:42:08 crc kubenswrapper[4631]: I1204 18:42:08.130930 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-fmgjn/crc-debug-tt4vc"] Dec 04 18:42:09 crc kubenswrapper[4631]: I1204 18:42:09.197966 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-fmgjn/crc-debug-tt4vc" Dec 04 18:42:09 crc kubenswrapper[4631]: I1204 18:42:09.283893 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67m5c\" (UniqueName: \"kubernetes.io/projected/bd01e760-fb82-4bd1-a675-e2e176a534c2-kube-api-access-67m5c\") pod \"bd01e760-fb82-4bd1-a675-e2e176a534c2\" (UID: \"bd01e760-fb82-4bd1-a675-e2e176a534c2\") " Dec 04 18:42:09 crc kubenswrapper[4631]: I1204 18:42:09.284018 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bd01e760-fb82-4bd1-a675-e2e176a534c2-host\") pod \"bd01e760-fb82-4bd1-a675-e2e176a534c2\" (UID: \"bd01e760-fb82-4bd1-a675-e2e176a534c2\") " Dec 04 18:42:09 crc kubenswrapper[4631]: I1204 18:42:09.284549 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bd01e760-fb82-4bd1-a675-e2e176a534c2-host" (OuterVolumeSpecName: "host") pod "bd01e760-fb82-4bd1-a675-e2e176a534c2" (UID: "bd01e760-fb82-4bd1-a675-e2e176a534c2"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 18:42:09 crc kubenswrapper[4631]: I1204 18:42:09.299562 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd01e760-fb82-4bd1-a675-e2e176a534c2-kube-api-access-67m5c" (OuterVolumeSpecName: "kube-api-access-67m5c") pod "bd01e760-fb82-4bd1-a675-e2e176a534c2" (UID: "bd01e760-fb82-4bd1-a675-e2e176a534c2"). InnerVolumeSpecName "kube-api-access-67m5c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:42:09 crc kubenswrapper[4631]: I1204 18:42:09.386435 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67m5c\" (UniqueName: \"kubernetes.io/projected/bd01e760-fb82-4bd1-a675-e2e176a534c2-kube-api-access-67m5c\") on node \"crc\" DevicePath \"\"" Dec 04 18:42:09 crc kubenswrapper[4631]: I1204 18:42:09.386470 4631 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bd01e760-fb82-4bd1-a675-e2e176a534c2-host\") on node \"crc\" DevicePath \"\"" Dec 04 18:42:10 crc kubenswrapper[4631]: I1204 18:42:10.110220 4631 scope.go:117] "RemoveContainer" containerID="79e396cf095a20cc078a52831509efef92d0f8c53fd871d6d8973bfa3d4e7afb" Dec 04 18:42:10 crc kubenswrapper[4631]: I1204 18:42:10.110629 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-fmgjn/crc-debug-tt4vc" Dec 04 18:42:10 crc kubenswrapper[4631]: I1204 18:42:10.249446 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd01e760-fb82-4bd1-a675-e2e176a534c2" path="/var/lib/kubelet/pods/bd01e760-fb82-4bd1-a675-e2e176a534c2/volumes" Dec 04 18:42:16 crc kubenswrapper[4631]: I1204 18:42:16.240079 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:42:16 crc kubenswrapper[4631]: E1204 18:42:16.240841 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:42:29 crc kubenswrapper[4631]: I1204 18:42:29.239519 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:42:29 crc kubenswrapper[4631]: E1204 18:42:29.240341 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:42:31 crc kubenswrapper[4631]: I1204 18:42:31.520770 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6866f4d6b8-5wp55_f65b2092-9992-4e4d-be14-6ea85af840a0/barbican-api/0.log" Dec 04 18:42:31 crc kubenswrapper[4631]: I1204 18:42:31.668339 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6866f4d6b8-5wp55_f65b2092-9992-4e4d-be14-6ea85af840a0/barbican-api-log/0.log" Dec 04 18:42:31 crc kubenswrapper[4631]: I1204 18:42:31.760709 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-75cd87c688-xlr7b_b5516312-5bde-4c7d-8910-bf75f2a98812/barbican-keystone-listener/0.log" Dec 04 18:42:31 crc kubenswrapper[4631]: I1204 18:42:31.898034 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-75cd87c688-xlr7b_b5516312-5bde-4c7d-8910-bf75f2a98812/barbican-keystone-listener-log/0.log" Dec 04 18:42:32 crc kubenswrapper[4631]: 
I1204 18:42:32.034125 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-f9b7c48cf-xdj7r_1de91a80-bddc-4f80-bf05-0d1aba161730/barbican-worker/0.log" Dec 04 18:42:32 crc kubenswrapper[4631]: I1204 18:42:32.070283 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-f9b7c48cf-xdj7r_1de91a80-bddc-4f80-bf05-0d1aba161730/barbican-worker-log/0.log" Dec 04 18:42:32 crc kubenswrapper[4631]: I1204 18:42:32.250325 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn_9789c9a8-e2ff-4344-a946-81d8a8ef26fe/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:42:32 crc kubenswrapper[4631]: I1204 18:42:32.506735 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b4871a6a-2cea-402a-9dfe-e72887258bb5/ceilometer-notification-agent/0.log" Dec 04 18:42:32 crc kubenswrapper[4631]: I1204 18:42:32.675312 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b4871a6a-2cea-402a-9dfe-e72887258bb5/ceilometer-central-agent/0.log" Dec 04 18:42:32 crc kubenswrapper[4631]: I1204 18:42:32.733574 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b4871a6a-2cea-402a-9dfe-e72887258bb5/proxy-httpd/0.log" Dec 04 18:42:32 crc kubenswrapper[4631]: I1204 18:42:32.827596 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b4871a6a-2cea-402a-9dfe-e72887258bb5/sg-core/0.log" Dec 04 18:42:33 crc kubenswrapper[4631]: I1204 18:42:33.013842 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_6b9d56f1-c2cf-471c-934b-15a0497af44b/cinder-api/0.log" Dec 04 18:42:33 crc kubenswrapper[4631]: I1204 18:42:33.036888 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_6b9d56f1-c2cf-471c-934b-15a0497af44b/cinder-api-log/0.log" Dec 04 18:42:33 crc kubenswrapper[4631]: I1204 18:42:33.176246 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_f5599bd7-2ca5-4217-a0bd-785b3fb612b7/cinder-scheduler/0.log" Dec 04 18:42:33 crc kubenswrapper[4631]: I1204 18:42:33.309917 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_f5599bd7-2ca5-4217-a0bd-785b3fb612b7/probe/0.log" Dec 04 18:42:33 crc kubenswrapper[4631]: I1204 18:42:33.495009 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-bwghg_9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:42:33 crc kubenswrapper[4631]: I1204 18:42:33.567111 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s_c303f9ff-2337-47a3-8e07-4ace557cc99a/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:42:33 crc kubenswrapper[4631]: I1204 18:42:33.811557 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7b659bdd7f-ndp7s_52592900-79a1-4fa6-8eb3-628f25972f5f/init/0.log" Dec 04 18:42:33 crc kubenswrapper[4631]: I1204 18:42:33.978887 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7b659bdd7f-ndp7s_52592900-79a1-4fa6-8eb3-628f25972f5f/init/0.log" Dec 04 18:42:34 crc kubenswrapper[4631]: I1204 18:42:34.086903 4631 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_dnsmasq-dns-7b659bdd7f-ndp7s_52592900-79a1-4fa6-8eb3-628f25972f5f/dnsmasq-dns/0.log" Dec 04 18:42:34 crc kubenswrapper[4631]: I1204 18:42:34.111336 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq_8a60b6a3-2e66-46ad-987f-9c6aac93e03f/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:42:34 crc kubenswrapper[4631]: I1204 18:42:34.270109 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_7f61006f-d20d-43ed-94d6-95615925184f/glance-httpd/0.log" Dec 04 18:42:34 crc kubenswrapper[4631]: I1204 18:42:34.384848 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_7f61006f-d20d-43ed-94d6-95615925184f/glance-log/0.log" Dec 04 18:42:34 crc kubenswrapper[4631]: I1204 18:42:34.502088 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_f773d050-d51b-4753-9be0-8f5a91c674bc/glance-httpd/0.log" Dec 04 18:42:34 crc kubenswrapper[4631]: I1204 18:42:34.533748 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_f773d050-d51b-4753-9be0-8f5a91c674bc/glance-log/0.log" Dec 04 18:42:34 crc kubenswrapper[4631]: I1204 18:42:34.692822 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-77d5fd455b-8kwkp_78aafb4d-470c-477d-bfe6-5b7a29b79fc0/horizon/1.log" Dec 04 18:42:34 crc kubenswrapper[4631]: I1204 18:42:34.892456 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-77d5fd455b-8kwkp_78aafb4d-470c-477d-bfe6-5b7a29b79fc0/horizon/0.log" Dec 04 18:42:35 crc kubenswrapper[4631]: I1204 18:42:35.122673 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv_52d13c44-1eee-4a4b-bd73-982e9d57f0d8/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:42:35 crc kubenswrapper[4631]: I1204 18:42:35.192772 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-77d5fd455b-8kwkp_78aafb4d-470c-477d-bfe6-5b7a29b79fc0/horizon-log/0.log" Dec 04 18:42:35 crc kubenswrapper[4631]: I1204 18:42:35.453270 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-5zdzl_191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:42:35 crc kubenswrapper[4631]: I1204 18:42:35.809647 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29414521-6wj5d_bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958/keystone-cron/0.log" Dec 04 18:42:35 crc kubenswrapper[4631]: I1204 18:42:35.880132 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_83a88a9d-413f-40ce-bae5-624b4cfe00c9/kube-state-metrics/0.log" Dec 04 18:42:36 crc kubenswrapper[4631]: I1204 18:42:36.019968 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-589bf6fb8-62vft_138d8c39-c5e9-48bf-83b7-efc22bc3ec1e/keystone-api/0.log" Dec 04 18:42:36 crc kubenswrapper[4631]: I1204 18:42:36.127903 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8_d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:42:36 crc kubenswrapper[4631]: I1204 18:42:36.973205 4631 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg_5bba5c47-0692-477b-9483-f80218571763/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:42:37 crc kubenswrapper[4631]: I1204 18:42:37.086223 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6d6c6b7549-c7hqg_7dc0a764-9aea-494f-b71e-eb0df5cf3d66/neutron-httpd/0.log" Dec 04 18:42:37 crc kubenswrapper[4631]: I1204 18:42:37.230217 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6d6c6b7549-c7hqg_7dc0a764-9aea-494f-b71e-eb0df5cf3d66/neutron-api/0.log" Dec 04 18:42:37 crc kubenswrapper[4631]: I1204 18:42:37.839025 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_fe9c3ab8-326e-49a0-8fe3-b54c15c89051/nova-cell0-conductor-conductor/0.log" Dec 04 18:42:38 crc kubenswrapper[4631]: I1204 18:42:38.012464 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_8f18cd83-a5c6-455c-87de-2549f96b9073/nova-cell1-conductor-conductor/0.log" Dec 04 18:42:38 crc kubenswrapper[4631]: I1204 18:42:38.537031 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_b96713c5-6fba-4ee6-9111-5aedf572a172/nova-api-log/0.log" Dec 04 18:42:38 crc kubenswrapper[4631]: I1204 18:42:38.690334 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_b96713c5-6fba-4ee6-9111-5aedf572a172/nova-api-api/0.log" Dec 04 18:42:38 crc kubenswrapper[4631]: I1204 18:42:38.714153 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_73bece06-ddcd-4bd7-9f77-1c7551dd5c10/nova-cell1-novncproxy-novncproxy/0.log" Dec 04 18:42:38 crc kubenswrapper[4631]: I1204 18:42:38.919591 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-lqlcs_93cd2870-edd3-4b7f-9868-6c437dcf3164/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:42:39 crc kubenswrapper[4631]: I1204 18:42:39.190595 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_df12d5a5-6083-4b46-b6bb-8894eb4f421b/nova-metadata-log/0.log" Dec 04 18:42:39 crc kubenswrapper[4631]: I1204 18:42:39.573860 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_4c1e28f2-5820-4e06-a20b-a9062d8280be/mysql-bootstrap/0.log" Dec 04 18:42:39 crc kubenswrapper[4631]: I1204 18:42:39.875984 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_4c1e28f2-5820-4e06-a20b-a9062d8280be/mysql-bootstrap/0.log" Dec 04 18:42:39 crc kubenswrapper[4631]: I1204 18:42:39.943336 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_77627c2f-d3c1-4699-9c42-8ab97657f312/nova-scheduler-scheduler/0.log" Dec 04 18:42:40 crc kubenswrapper[4631]: I1204 18:42:40.609666 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_0c7df533-7298-4204-aeca-992631c9ccb6/mysql-bootstrap/0.log" Dec 04 18:42:40 crc kubenswrapper[4631]: I1204 18:42:40.611619 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_4c1e28f2-5820-4e06-a20b-a9062d8280be/galera/0.log" Dec 04 18:42:40 crc kubenswrapper[4631]: I1204 18:42:40.678732 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_df12d5a5-6083-4b46-b6bb-8894eb4f421b/nova-metadata-metadata/0.log" Dec 04 
18:42:40 crc kubenswrapper[4631]: I1204 18:42:40.882992 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_0c7df533-7298-4204-aeca-992631c9ccb6/mysql-bootstrap/0.log" Dec 04 18:42:40 crc kubenswrapper[4631]: I1204 18:42:40.888299 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_0c7df533-7298-4204-aeca-992631c9ccb6/galera/0.log" Dec 04 18:42:40 crc kubenswrapper[4631]: I1204 18:42:40.964628 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_c65652e6-704f-4f88-9b9d-435868d33e0e/openstackclient/0.log" Dec 04 18:42:41 crc kubenswrapper[4631]: I1204 18:42:41.230916 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-2vnfm_10032f10-bb41-4039-a44d-ca336b45d4df/ovn-controller/0.log" Dec 04 18:42:41 crc kubenswrapper[4631]: I1204 18:42:41.366267 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-dnt84_ee500515-c2eb-4f8e-b022-1d4f1bb8106e/openstack-network-exporter/0.log" Dec 04 18:42:41 crc kubenswrapper[4631]: I1204 18:42:41.617345 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gsp75_8d34f815-7011-438b-8c8c-45363f359101/ovsdb-server-init/0.log" Dec 04 18:42:41 crc kubenswrapper[4631]: I1204 18:42:41.791255 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gsp75_8d34f815-7011-438b-8c8c-45363f359101/ovsdb-server-init/0.log" Dec 04 18:42:41 crc kubenswrapper[4631]: I1204 18:42:41.820669 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gsp75_8d34f815-7011-438b-8c8c-45363f359101/ovsdb-server/0.log" Dec 04 18:42:41 crc kubenswrapper[4631]: I1204 18:42:41.838054 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gsp75_8d34f815-7011-438b-8c8c-45363f359101/ovs-vswitchd/0.log" Dec 04 18:42:42 crc kubenswrapper[4631]: I1204 18:42:42.138880 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-qspr8_e9c5fea5-b0f9-4894-bf45-699c8b23d9f1/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:42:42 crc kubenswrapper[4631]: I1204 18:42:42.220103 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_5a597650-5fec-493b-bda8-93bb60985ae5/openstack-network-exporter/0.log" Dec 04 18:42:42 crc kubenswrapper[4631]: I1204 18:42:42.249906 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:42:42 crc kubenswrapper[4631]: E1204 18:42:42.250110 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:42:42 crc kubenswrapper[4631]: I1204 18:42:42.263316 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_5a597650-5fec-493b-bda8-93bb60985ae5/ovn-northd/0.log" Dec 04 18:42:42 crc kubenswrapper[4631]: I1204 18:42:42.486499 4631 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-nb-0_fe7546ca-3ffc-4d40-b075-00254781f008/openstack-network-exporter/0.log" Dec 04 18:42:42 crc kubenswrapper[4631]: I1204 18:42:42.617274 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_fe7546ca-3ffc-4d40-b075-00254781f008/ovsdbserver-nb/0.log" Dec 04 18:42:42 crc kubenswrapper[4631]: I1204 18:42:42.820299 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b0339f65-9966-4790-a7d2-954145c70f7b/ovsdbserver-sb/0.log" Dec 04 18:42:42 crc kubenswrapper[4631]: I1204 18:42:42.848456 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b0339f65-9966-4790-a7d2-954145c70f7b/openstack-network-exporter/0.log" Dec 04 18:42:43 crc kubenswrapper[4631]: I1204 18:42:43.127227 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-776f95766d-5qctj_4b648789-3c38-485a-ad71-70566e8684fb/placement-api/0.log" Dec 04 18:42:43 crc kubenswrapper[4631]: I1204 18:42:43.394594 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1ba76133-7ea9-4b93-abdd-426b64c09c9d/setup-container/0.log" Dec 04 18:42:43 crc kubenswrapper[4631]: I1204 18:42:43.505247 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-776f95766d-5qctj_4b648789-3c38-485a-ad71-70566e8684fb/placement-log/0.log" Dec 04 18:42:43 crc kubenswrapper[4631]: I1204 18:42:43.626650 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1ba76133-7ea9-4b93-abdd-426b64c09c9d/setup-container/0.log" Dec 04 18:42:43 crc kubenswrapper[4631]: I1204 18:42:43.640556 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1ba76133-7ea9-4b93-abdd-426b64c09c9d/rabbitmq/0.log" Dec 04 18:42:43 crc kubenswrapper[4631]: I1204 18:42:43.774754 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9ef0c479-0169-423e-9619-fbf9f7e63a97/setup-container/0.log" Dec 04 18:42:44 crc kubenswrapper[4631]: I1204 18:42:44.267729 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9ef0c479-0169-423e-9619-fbf9f7e63a97/setup-container/0.log" Dec 04 18:42:44 crc kubenswrapper[4631]: I1204 18:42:44.360950 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9ef0c479-0169-423e-9619-fbf9f7e63a97/rabbitmq/0.log" Dec 04 18:42:44 crc kubenswrapper[4631]: I1204 18:42:44.442231 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj_c2ec68e5-0f90-46f3-b0f7-1fdc8956c306/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:42:44 crc kubenswrapper[4631]: I1204 18:42:44.740710 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg_583c1d0c-fb4d-4d25-9d84-798d63586401/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:42:44 crc kubenswrapper[4631]: I1204 18:42:44.872564 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-5j289_71ccfaca-2557-4840-941c-a36d55ebd0bc/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:42:45 crc kubenswrapper[4631]: I1204 18:42:45.098040 4631 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-p2h9d_c331e4d1-1da9-4a7f-bd67-f24a4c76b971/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:42:45 crc kubenswrapper[4631]: I1204 18:42:45.167974 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-8c42n_6d2a06a0-e76d-469a-bf34-4d32dd8b0b84/ssh-known-hosts-edpm-deployment/0.log" Dec 04 18:42:45 crc kubenswrapper[4631]: I1204 18:42:45.483072 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5dd47bd8d5-qcz5l_2f9535d3-d81d-4e55-bc05-f36a8dd6b731/proxy-server/0.log" Dec 04 18:42:45 crc kubenswrapper[4631]: I1204 18:42:45.623956 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5dd47bd8d5-qcz5l_2f9535d3-d81d-4e55-bc05-f36a8dd6b731/proxy-httpd/0.log" Dec 04 18:42:45 crc kubenswrapper[4631]: I1204 18:42:45.648603 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-xjxf8_23cc29b2-48d6-42f1-a2ff-fbd418d0b47f/swift-ring-rebalance/0.log" Dec 04 18:42:45 crc kubenswrapper[4631]: I1204 18:42:45.844247 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/account-auditor/0.log" Dec 04 18:42:45 crc kubenswrapper[4631]: I1204 18:42:45.910678 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/account-reaper/0.log" Dec 04 18:42:45 crc kubenswrapper[4631]: I1204 18:42:45.940079 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/account-replicator/0.log" Dec 04 18:42:45 crc kubenswrapper[4631]: I1204 18:42:45.990179 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/account-server/0.log" Dec 04 18:42:46 crc kubenswrapper[4631]: I1204 18:42:46.148078 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/container-auditor/0.log" Dec 04 18:42:46 crc kubenswrapper[4631]: I1204 18:42:46.278618 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/container-server/0.log" Dec 04 18:42:46 crc kubenswrapper[4631]: I1204 18:42:46.279991 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/container-replicator/0.log" Dec 04 18:42:46 crc kubenswrapper[4631]: I1204 18:42:46.297282 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/container-updater/0.log" Dec 04 18:42:46 crc kubenswrapper[4631]: I1204 18:42:46.510322 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/object-auditor/0.log" Dec 04 18:42:46 crc kubenswrapper[4631]: I1204 18:42:46.588493 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/object-replicator/0.log" Dec 04 18:42:46 crc kubenswrapper[4631]: I1204 18:42:46.619134 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/object-server/0.log" Dec 04 18:42:46 crc kubenswrapper[4631]: I1204 18:42:46.641547 4631 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/object-expirer/0.log" Dec 04 18:42:46 crc kubenswrapper[4631]: I1204 18:42:46.752760 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/object-updater/0.log" Dec 04 18:42:46 crc kubenswrapper[4631]: I1204 18:42:46.874572 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/rsync/0.log" Dec 04 18:42:46 crc kubenswrapper[4631]: I1204 18:42:46.901007 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/swift-recon-cron/0.log" Dec 04 18:42:47 crc kubenswrapper[4631]: I1204 18:42:47.225603 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6_15251242-87d0-444d-aa7f-f0b8936efd96/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:42:47 crc kubenswrapper[4631]: I1204 18:42:47.277445 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_36e446e6-248d-4a69-80f1-585a9bfcd4cf/tempest-tests-tempest-tests-runner/0.log" Dec 04 18:42:47 crc kubenswrapper[4631]: I1204 18:42:47.599102 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_cc75bbe8-1619-48a6-8dd8-4353f50fac82/test-operator-logs-container/0.log" Dec 04 18:42:47 crc kubenswrapper[4631]: I1204 18:42:47.716092 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-28v4d_02fda8da-e708-4897-9997-9c71901e45b7/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:42:56 crc kubenswrapper[4631]: I1204 18:42:56.240409 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:42:56 crc kubenswrapper[4631]: E1204 18:42:56.241181 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:42:57 crc kubenswrapper[4631]: I1204 18:42:57.213657 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_e136634f-2944-42c8-bd08-517411c92754/memcached/0.log" Dec 04 18:43:10 crc kubenswrapper[4631]: I1204 18:43:10.250140 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:43:10 crc kubenswrapper[4631]: E1204 18:43:10.251262 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:43:17 crc kubenswrapper[4631]: I1204 18:43:17.759849 4631 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs_ad8edd24-7550-455a-b394-343d4e2ca11b/util/0.log" Dec 04 18:43:17 crc kubenswrapper[4631]: I1204 18:43:17.948995 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs_ad8edd24-7550-455a-b394-343d4e2ca11b/util/0.log" Dec 04 18:43:17 crc kubenswrapper[4631]: I1204 18:43:17.999073 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs_ad8edd24-7550-455a-b394-343d4e2ca11b/pull/0.log" Dec 04 18:43:18 crc kubenswrapper[4631]: I1204 18:43:18.021056 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs_ad8edd24-7550-455a-b394-343d4e2ca11b/pull/0.log" Dec 04 18:43:18 crc kubenswrapper[4631]: I1204 18:43:18.176137 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs_ad8edd24-7550-455a-b394-343d4e2ca11b/extract/0.log" Dec 04 18:43:18 crc kubenswrapper[4631]: I1204 18:43:18.202188 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs_ad8edd24-7550-455a-b394-343d4e2ca11b/pull/0.log" Dec 04 18:43:18 crc kubenswrapper[4631]: I1204 18:43:18.260617 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs_ad8edd24-7550-455a-b394-343d4e2ca11b/util/0.log" Dec 04 18:43:18 crc kubenswrapper[4631]: I1204 18:43:18.438018 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-67cns_30be0340-cc50-4244-9b27-7e41f86bf113/kube-rbac-proxy/0.log" Dec 04 18:43:18 crc kubenswrapper[4631]: I1204 18:43:18.516419 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-67cns_30be0340-cc50-4244-9b27-7e41f86bf113/manager/0.log" Dec 04 18:43:18 crc kubenswrapper[4631]: I1204 18:43:18.523176 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-9s4jp_709a39e5-9fe0-4861-8761-774f26a4a315/kube-rbac-proxy/0.log" Dec 04 18:43:18 crc kubenswrapper[4631]: I1204 18:43:18.675214 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-9s4jp_709a39e5-9fe0-4861-8761-774f26a4a315/manager/0.log" Dec 04 18:43:18 crc kubenswrapper[4631]: I1204 18:43:18.753502 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-p6vp2_dd2fd0ee-2bee-4cd2-9c24-a0c0dce37b46/kube-rbac-proxy/0.log" Dec 04 18:43:18 crc kubenswrapper[4631]: I1204 18:43:18.804911 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-p6vp2_dd2fd0ee-2bee-4cd2-9c24-a0c0dce37b46/manager/0.log" Dec 04 18:43:18 crc kubenswrapper[4631]: I1204 18:43:18.977549 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-ldclc_32886d79-72a7-4318-8098-718f0f55f61e/kube-rbac-proxy/0.log" Dec 04 18:43:19 crc kubenswrapper[4631]: 
I1204 18:43:19.051339 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-ldclc_32886d79-72a7-4318-8098-718f0f55f61e/manager/0.log" Dec 04 18:43:19 crc kubenswrapper[4631]: I1204 18:43:19.145832 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-jwf42_1d24a40a-06b2-43e4-9921-05dd2e8f27ea/kube-rbac-proxy/0.log" Dec 04 18:43:19 crc kubenswrapper[4631]: I1204 18:43:19.237585 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-jwf42_1d24a40a-06b2-43e4-9921-05dd2e8f27ea/manager/0.log" Dec 04 18:43:19 crc kubenswrapper[4631]: I1204 18:43:19.296847 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-cz8hk_5a715ee8-c048-4447-b3fc-5f94121c0e7e/kube-rbac-proxy/0.log" Dec 04 18:43:19 crc kubenswrapper[4631]: I1204 18:43:19.386023 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-cz8hk_5a715ee8-c048-4447-b3fc-5f94121c0e7e/manager/0.log" Dec 04 18:43:19 crc kubenswrapper[4631]: I1204 18:43:19.601030 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-57k7z_fdf92431-a279-4eb5-8e5d-56e353febcf2/kube-rbac-proxy/0.log" Dec 04 18:43:19 crc kubenswrapper[4631]: I1204 18:43:19.668666 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-57k7z_fdf92431-a279-4eb5-8e5d-56e353febcf2/manager/0.log" Dec 04 18:43:19 crc kubenswrapper[4631]: I1204 18:43:19.783770 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-2bzwd_b8212ac4-255e-4de2-ac13-0033682d7550/kube-rbac-proxy/0.log" Dec 04 18:43:19 crc kubenswrapper[4631]: I1204 18:43:19.888831 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-2bzwd_b8212ac4-255e-4de2-ac13-0033682d7550/manager/0.log" Dec 04 18:43:19 crc kubenswrapper[4631]: I1204 18:43:19.908727 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-xdm6x_f0089345-8234-4ea7-9fbe-528afe9d5fc0/kube-rbac-proxy/0.log" Dec 04 18:43:20 crc kubenswrapper[4631]: I1204 18:43:20.079061 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-xdm6x_f0089345-8234-4ea7-9fbe-528afe9d5fc0/manager/0.log" Dec 04 18:43:20 crc kubenswrapper[4631]: I1204 18:43:20.119022 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-sw9bk_72f35a0d-fa67-44c7-a25c-b720885d5708/manager/0.log" Dec 04 18:43:20 crc kubenswrapper[4631]: I1204 18:43:20.151017 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-sw9bk_72f35a0d-fa67-44c7-a25c-b720885d5708/kube-rbac-proxy/0.log" Dec 04 18:43:20 crc kubenswrapper[4631]: I1204 18:43:20.358909 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-tmr9d_76c2990b-dff1-4715-8517-28cff884cf12/kube-rbac-proxy/0.log" Dec 04 18:43:20 crc 
kubenswrapper[4631]: I1204 18:43:20.405533 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-tmr9d_76c2990b-dff1-4715-8517-28cff884cf12/manager/0.log" Dec 04 18:43:20 crc kubenswrapper[4631]: I1204 18:43:20.497745 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-2hbp2_41a5b9af-e0eb-46d8-84f0-0962dd72367c/kube-rbac-proxy/0.log" Dec 04 18:43:20 crc kubenswrapper[4631]: I1204 18:43:20.898464 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-czzvh_22b6958b-a18a-49c1-b6a4-28b3ebad0846/kube-rbac-proxy/0.log" Dec 04 18:43:20 crc kubenswrapper[4631]: I1204 18:43:20.952149 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-2hbp2_41a5b9af-e0eb-46d8-84f0-0962dd72367c/manager/0.log" Dec 04 18:43:21 crc kubenswrapper[4631]: I1204 18:43:21.101858 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-czzvh_22b6958b-a18a-49c1-b6a4-28b3ebad0846/manager/0.log" Dec 04 18:43:21 crc kubenswrapper[4631]: I1204 18:43:21.189286 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-66x65_becd7035-989e-497f-96ad-7eaa0d7e4456/kube-rbac-proxy/0.log" Dec 04 18:43:21 crc kubenswrapper[4631]: I1204 18:43:21.225956 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-66x65_becd7035-989e-497f-96ad-7eaa0d7e4456/manager/0.log" Dec 04 18:43:21 crc kubenswrapper[4631]: I1204 18:43:21.433213 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq_e20ca639-4732-4b27-b2e2-8d4cc9374515/kube-rbac-proxy/0.log" Dec 04 18:43:21 crc kubenswrapper[4631]: I1204 18:43:21.487840 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq_e20ca639-4732-4b27-b2e2-8d4cc9374515/manager/0.log" Dec 04 18:43:21 crc kubenswrapper[4631]: I1204 18:43:21.976704 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-66bcc8f984-lzslx_75ee8627-c453-43a3-a933-080907b850cc/operator/0.log" Dec 04 18:43:22 crc kubenswrapper[4631]: I1204 18:43:22.019017 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-c9xfp_f1c2db29-609d-4d06-bf5a-702536504419/registry-server/0.log" Dec 04 18:43:22 crc kubenswrapper[4631]: I1204 18:43:22.379574 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-xc5m4_1c44bc20-c171-4476-a959-9e31d9bbac58/kube-rbac-proxy/0.log" Dec 04 18:43:22 crc kubenswrapper[4631]: I1204 18:43:22.380511 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-xc5m4_1c44bc20-c171-4476-a959-9e31d9bbac58/manager/0.log" Dec 04 18:43:22 crc kubenswrapper[4631]: I1204 18:43:22.479512 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-df7fm_7e9e43c6-516b-4195-9d65-e6e80544bb7d/kube-rbac-proxy/0.log" 
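[Editor's aside, not part of the captured log.] The stretch above is raw kubelet klog output: PLEG container lifecycle events, volume mount/unmount reconciliation for the short-lived crc-debug pods, the must-gather "Finished parsing log file" sweep, and the machine-config-daemon container cycling through its 5m0s CrashLoopBackOff. A minimal Python sketch for tallying such entries follows; the journald-plus-klog line shape is taken from the lines above, while the helper names (KLOG_HEADER, summarize) and the default kubelet.log path are this sketch's own assumptions, not anything defined by the log.

    #!/usr/bin/env python3
    """Sketch: summarize kubelet klog entries like the ones captured above.

    Counts entries per severity/source location and tallies CrashLoopBackOff
    mentions (e.g. the repeating machine-config-daemon back-off in this log).
    """
    import re
    import sys
    from collections import Counter

    # journald prefix + klog header, as seen above, e.g.:
    #   "kubenswrapper[4631]: E1204 18:42:56.241181 4631 pod_workers.go:1301]"
    KLOG_HEADER = re.compile(
        r'kubenswrapper\[\d+\]: '
        r'(?P<sev>[IWEF])\d{4} \d{2}:\d{2}:\d{2}\.\d+ \d+ (?P<src>[\w.]+:\d+)\]'
    )

    def summarize(path: str) -> None:
        by_src: Counter = Counter()
        crashloops = 0
        with open(path, encoding="utf-8", errors="replace") as f:
            for line in f:
                # finditer copes with captures where several journal
                # entries ended up on one physical line, as in this file
                for m in KLOG_HEADER.finditer(line):
                    by_src[(m.group("sev"), m.group("src"))] += 1
                crashloops += line.count("CrashLoopBackOff")
        print(f"CrashLoopBackOff mentions: {crashloops}")
        for (sev, src), n in by_src.most_common(10):
            print(f"{sev} {src:<25} {n}")

    if __name__ == "__main__":
        summarize(sys.argv[1] if len(sys.argv) > 1 else "kubelet.log")

Run against a capture like this one, it would likely surface log.go:25 as the busiest info source and pod_workers.go:1301 as the dominant error source, matching the repeating back-off entries above. The log itself resumes below.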
Dec 04 18:43:22 crc kubenswrapper[4631]: I1204 18:43:22.669215 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-df7fm_7e9e43c6-516b-4195-9d65-e6e80544bb7d/manager/0.log" Dec 04 18:43:22 crc kubenswrapper[4631]: I1204 18:43:22.779686 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-ct9z5_8c4e7e86-5efa-4888-a717-2dcafc489144/operator/0.log" Dec 04 18:43:22 crc kubenswrapper[4631]: I1204 18:43:22.780080 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-f65bcfbd6-zphvh_8eceb916-5479-43f0-a3f4-75d0643adcab/manager/0.log" Dec 04 18:43:22 crc kubenswrapper[4631]: I1204 18:43:22.912316 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-grlw8_acce4f1e-311d-44da-aaf9-a2cddc75be35/kube-rbac-proxy/0.log" Dec 04 18:43:22 crc kubenswrapper[4631]: I1204 18:43:22.926464 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-grlw8_acce4f1e-311d-44da-aaf9-a2cddc75be35/manager/0.log" Dec 04 18:43:23 crc kubenswrapper[4631]: I1204 18:43:23.006214 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-f5lq4_f05ded75-e10d-41ed-921d-0ba118f3453d/kube-rbac-proxy/0.log" Dec 04 18:43:23 crc kubenswrapper[4631]: I1204 18:43:23.159838 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-f5lq4_f05ded75-e10d-41ed-921d-0ba118f3453d/manager/0.log" Dec 04 18:43:23 crc kubenswrapper[4631]: I1204 18:43:23.199695 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-r28bs_a19a11a8-a149-4b75-ab68-359723dcfbcb/kube-rbac-proxy/0.log" Dec 04 18:43:23 crc kubenswrapper[4631]: I1204 18:43:23.246340 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-r28bs_a19a11a8-a149-4b75-ab68-359723dcfbcb/manager/0.log" Dec 04 18:43:23 crc kubenswrapper[4631]: I1204 18:43:23.374059 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-bqs5m_7cf50b74-b958-4f66-aefc-2ad897abdec2/kube-rbac-proxy/0.log" Dec 04 18:43:23 crc kubenswrapper[4631]: I1204 18:43:23.441135 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-bqs5m_7cf50b74-b958-4f66-aefc-2ad897abdec2/manager/0.log" Dec 04 18:43:25 crc kubenswrapper[4631]: I1204 18:43:25.239575 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:43:25 crc kubenswrapper[4631]: E1204 18:43:25.240112 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:43:39 crc kubenswrapper[4631]: I1204 18:43:39.240079 4631 scope.go:117] 
"RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:43:39 crc kubenswrapper[4631]: E1204 18:43:39.240818 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:43:44 crc kubenswrapper[4631]: I1204 18:43:44.140302 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-gswpr_0f92bdbc-4785-44bf-a91c-88fe53b02d2a/control-plane-machine-set-operator/0.log" Dec 04 18:43:44 crc kubenswrapper[4631]: I1204 18:43:44.209914 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-pdgsq_9a38e196-88e0-4add-8e52-40b1d8eb79e9/kube-rbac-proxy/0.log" Dec 04 18:43:44 crc kubenswrapper[4631]: I1204 18:43:44.301750 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-pdgsq_9a38e196-88e0-4add-8e52-40b1d8eb79e9/machine-api-operator/0.log" Dec 04 18:43:51 crc kubenswrapper[4631]: I1204 18:43:51.240483 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:43:51 crc kubenswrapper[4631]: E1204 18:43:51.241151 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:43:57 crc kubenswrapper[4631]: I1204 18:43:57.026816 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-6djk2_ea64b959-aecd-46e1-b2a4-cde17cc753d8/cert-manager-controller/0.log" Dec 04 18:43:57 crc kubenswrapper[4631]: I1204 18:43:57.174909 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-5vb4n_5a3b7e1e-41ff-4029-b2cd-6dc6be40ae3d/cert-manager-cainjector/0.log" Dec 04 18:43:57 crc kubenswrapper[4631]: I1204 18:43:57.258085 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-tffkj_d558fa33-5875-4eb1-80ec-2f5726659b7e/cert-manager-webhook/0.log" Dec 04 18:44:03 crc kubenswrapper[4631]: I1204 18:44:03.238739 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:44:03 crc kubenswrapper[4631]: E1204 18:44:03.239632 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:44:13 crc kubenswrapper[4631]: I1204 18:44:13.227053 4631 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-5cbf9_1bfeccd7-32aa-4315-96df-4d7df3f10767/nmstate-console-plugin/0.log" Dec 04 18:44:13 crc kubenswrapper[4631]: I1204 18:44:13.472429 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-dqwmp_a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9/nmstate-handler/0.log" Dec 04 18:44:13 crc kubenswrapper[4631]: I1204 18:44:13.549442 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-8pp22_061c2acd-2d43-420a-8c0f-d31fcd0b2d3e/kube-rbac-proxy/0.log" Dec 04 18:44:13 crc kubenswrapper[4631]: I1204 18:44:13.604205 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-8pp22_061c2acd-2d43-420a-8c0f-d31fcd0b2d3e/nmstate-metrics/0.log" Dec 04 18:44:13 crc kubenswrapper[4631]: I1204 18:44:13.773759 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-2b87p_f095f34f-aa8e-4f97-a34d-63fbc8722163/nmstate-operator/0.log" Dec 04 18:44:13 crc kubenswrapper[4631]: I1204 18:44:13.848164 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-kd9nq_6af82021-e54a-415b-963a-3e0ca6f7fd5c/nmstate-webhook/0.log" Dec 04 18:44:14 crc kubenswrapper[4631]: I1204 18:44:14.239746 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:44:14 crc kubenswrapper[4631]: E1204 18:44:14.240038 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:44:26 crc kubenswrapper[4631]: I1204 18:44:26.239520 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:44:26 crc kubenswrapper[4631]: E1204 18:44:26.241113 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:44:29 crc kubenswrapper[4631]: I1204 18:44:29.851744 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-cpxkk_7531a7c8-09d0-470e-b530-227bff4a6659/kube-rbac-proxy/0.log" Dec 04 18:44:29 crc kubenswrapper[4631]: I1204 18:44:29.958470 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-cpxkk_7531a7c8-09d0-470e-b530-227bff4a6659/controller/0.log" Dec 04 18:44:30 crc kubenswrapper[4631]: I1204 18:44:30.112074 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-frr-files/0.log" Dec 04 18:44:30 crc kubenswrapper[4631]: I1204 18:44:30.286330 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-metrics/0.log" Dec 04 
18:44:30 crc kubenswrapper[4631]: I1204 18:44:30.296471 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-reloader/0.log" Dec 04 18:44:30 crc kubenswrapper[4631]: I1204 18:44:30.323803 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-frr-files/0.log" Dec 04 18:44:30 crc kubenswrapper[4631]: I1204 18:44:30.373109 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-reloader/0.log" Dec 04 18:44:30 crc kubenswrapper[4631]: I1204 18:44:30.563426 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-reloader/0.log" Dec 04 18:44:30 crc kubenswrapper[4631]: I1204 18:44:30.597549 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-metrics/0.log" Dec 04 18:44:30 crc kubenswrapper[4631]: I1204 18:44:30.617433 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-frr-files/0.log" Dec 04 18:44:30 crc kubenswrapper[4631]: I1204 18:44:30.637516 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-metrics/0.log" Dec 04 18:44:30 crc kubenswrapper[4631]: I1204 18:44:30.777740 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-reloader/0.log" Dec 04 18:44:30 crc kubenswrapper[4631]: I1204 18:44:30.813466 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-frr-files/0.log" Dec 04 18:44:30 crc kubenswrapper[4631]: I1204 18:44:30.831790 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-metrics/0.log" Dec 04 18:44:30 crc kubenswrapper[4631]: I1204 18:44:30.865688 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/controller/0.log" Dec 04 18:44:30 crc kubenswrapper[4631]: I1204 18:44:30.994656 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/kube-rbac-proxy/0.log" Dec 04 18:44:31 crc kubenswrapper[4631]: I1204 18:44:31.055405 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/frr-metrics/0.log" Dec 04 18:44:31 crc kubenswrapper[4631]: I1204 18:44:31.184171 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/kube-rbac-proxy-frr/0.log" Dec 04 18:44:31 crc kubenswrapper[4631]: I1204 18:44:31.278642 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/reloader/0.log" Dec 04 18:44:31 crc kubenswrapper[4631]: I1204 18:44:31.459828 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-rpshr_78c76818-4dc3-4a33-b105-f8194a1cde60/frr-k8s-webhook-server/0.log" Dec 04 18:44:31 crc kubenswrapper[4631]: I1204 18:44:31.613300 4631 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_metallb-operator-controller-manager-9d6f9bbbc-h6txj_7628937e-69d4-416b-bf62-0b8cb083c4b1/manager/0.log" Dec 04 18:44:31 crc kubenswrapper[4631]: I1204 18:44:31.838736 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6dfc44c866-psc9v_b471a575-aeaa-473d-a180-161a7c07d2af/webhook-server/0.log" Dec 04 18:44:32 crc kubenswrapper[4631]: I1204 18:44:32.103440 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-45ph8_025cf6e6-5d36-4973-bac3-7cd1046ddeea/kube-rbac-proxy/0.log" Dec 04 18:44:32 crc kubenswrapper[4631]: I1204 18:44:32.128138 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/frr/0.log" Dec 04 18:44:32 crc kubenswrapper[4631]: I1204 18:44:32.390054 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-45ph8_025cf6e6-5d36-4973-bac3-7cd1046ddeea/speaker/0.log" Dec 04 18:44:41 crc kubenswrapper[4631]: I1204 18:44:41.239712 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:44:41 crc kubenswrapper[4631]: E1204 18:44:41.240443 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:44:46 crc kubenswrapper[4631]: I1204 18:44:46.932851 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84_12f1788b-0cad-4272-9208-6ed4bd4d2ac0/util/0.log" Dec 04 18:44:47 crc kubenswrapper[4631]: I1204 18:44:47.263139 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84_12f1788b-0cad-4272-9208-6ed4bd4d2ac0/pull/0.log" Dec 04 18:44:47 crc kubenswrapper[4631]: I1204 18:44:47.292343 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84_12f1788b-0cad-4272-9208-6ed4bd4d2ac0/pull/0.log" Dec 04 18:44:47 crc kubenswrapper[4631]: I1204 18:44:47.350210 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84_12f1788b-0cad-4272-9208-6ed4bd4d2ac0/util/0.log" Dec 04 18:44:47 crc kubenswrapper[4631]: I1204 18:44:47.551983 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84_12f1788b-0cad-4272-9208-6ed4bd4d2ac0/extract/0.log" Dec 04 18:44:47 crc kubenswrapper[4631]: I1204 18:44:47.584785 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84_12f1788b-0cad-4272-9208-6ed4bd4d2ac0/pull/0.log" Dec 04 18:44:47 crc kubenswrapper[4631]: I1204 18:44:47.585070 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84_12f1788b-0cad-4272-9208-6ed4bd4d2ac0/util/0.log" Dec 04 18:44:47 crc 
kubenswrapper[4631]: I1204 18:44:47.844998 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28_104d954a-30c9-401b-8e56-817777e91f38/util/0.log" Dec 04 18:44:48 crc kubenswrapper[4631]: I1204 18:44:48.027592 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28_104d954a-30c9-401b-8e56-817777e91f38/pull/0.log" Dec 04 18:44:48 crc kubenswrapper[4631]: I1204 18:44:48.083983 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28_104d954a-30c9-401b-8e56-817777e91f38/pull/0.log" Dec 04 18:44:48 crc kubenswrapper[4631]: I1204 18:44:48.100617 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28_104d954a-30c9-401b-8e56-817777e91f38/util/0.log" Dec 04 18:44:48 crc kubenswrapper[4631]: I1204 18:44:48.282559 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28_104d954a-30c9-401b-8e56-817777e91f38/util/0.log" Dec 04 18:44:48 crc kubenswrapper[4631]: I1204 18:44:48.318267 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28_104d954a-30c9-401b-8e56-817777e91f38/pull/0.log" Dec 04 18:44:48 crc kubenswrapper[4631]: I1204 18:44:48.351431 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28_104d954a-30c9-401b-8e56-817777e91f38/extract/0.log" Dec 04 18:44:48 crc kubenswrapper[4631]: I1204 18:44:48.508325 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pxbcf_2cba6dd8-e8de-492c-b25f-f7092a13c72f/extract-utilities/0.log" Dec 04 18:44:48 crc kubenswrapper[4631]: I1204 18:44:48.775521 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pxbcf_2cba6dd8-e8de-492c-b25f-f7092a13c72f/extract-utilities/0.log" Dec 04 18:44:48 crc kubenswrapper[4631]: I1204 18:44:48.826854 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pxbcf_2cba6dd8-e8de-492c-b25f-f7092a13c72f/extract-content/0.log" Dec 04 18:44:48 crc kubenswrapper[4631]: I1204 18:44:48.844588 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pxbcf_2cba6dd8-e8de-492c-b25f-f7092a13c72f/extract-content/0.log" Dec 04 18:44:49 crc kubenswrapper[4631]: I1204 18:44:49.022192 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pxbcf_2cba6dd8-e8de-492c-b25f-f7092a13c72f/extract-utilities/0.log" Dec 04 18:44:49 crc kubenswrapper[4631]: I1204 18:44:49.067917 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pxbcf_2cba6dd8-e8de-492c-b25f-f7092a13c72f/extract-content/0.log" Dec 04 18:44:49 crc kubenswrapper[4631]: I1204 18:44:49.306903 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pxbcf_2cba6dd8-e8de-492c-b25f-f7092a13c72f/registry-server/0.log" Dec 04 18:44:49 crc kubenswrapper[4631]: I1204 18:44:49.334165 4631 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-db4qf_5267c3bf-4068-4178-9e3e-9a24e1c11a5e/extract-utilities/0.log" Dec 04 18:44:50 crc kubenswrapper[4631]: I1204 18:44:50.074473 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-db4qf_5267c3bf-4068-4178-9e3e-9a24e1c11a5e/extract-content/0.log" Dec 04 18:44:50 crc kubenswrapper[4631]: I1204 18:44:50.092766 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-db4qf_5267c3bf-4068-4178-9e3e-9a24e1c11a5e/extract-content/0.log" Dec 04 18:44:50 crc kubenswrapper[4631]: I1204 18:44:50.097920 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-db4qf_5267c3bf-4068-4178-9e3e-9a24e1c11a5e/extract-utilities/0.log" Dec 04 18:44:50 crc kubenswrapper[4631]: I1204 18:44:50.581171 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-db4qf_5267c3bf-4068-4178-9e3e-9a24e1c11a5e/extract-utilities/0.log" Dec 04 18:44:50 crc kubenswrapper[4631]: I1204 18:44:50.636006 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-db4qf_5267c3bf-4068-4178-9e3e-9a24e1c11a5e/extract-content/0.log" Dec 04 18:44:50 crc kubenswrapper[4631]: I1204 18:44:50.850748 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-lpl45_68b9b122-03d1-41c7-8910-62826c1eedbb/marketplace-operator/0.log" Dec 04 18:44:51 crc kubenswrapper[4631]: I1204 18:44:51.029921 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tzprv_575a3ee8-f538-4c96-8067-564def2cc3ff/extract-utilities/0.log" Dec 04 18:44:51 crc kubenswrapper[4631]: I1204 18:44:51.248327 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tzprv_575a3ee8-f538-4c96-8067-564def2cc3ff/extract-content/0.log" Dec 04 18:44:51 crc kubenswrapper[4631]: I1204 18:44:51.309854 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tzprv_575a3ee8-f538-4c96-8067-564def2cc3ff/extract-utilities/0.log" Dec 04 18:44:51 crc kubenswrapper[4631]: I1204 18:44:51.369617 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-db4qf_5267c3bf-4068-4178-9e3e-9a24e1c11a5e/registry-server/0.log" Dec 04 18:44:51 crc kubenswrapper[4631]: I1204 18:44:51.385154 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tzprv_575a3ee8-f538-4c96-8067-564def2cc3ff/extract-content/0.log" Dec 04 18:44:51 crc kubenswrapper[4631]: I1204 18:44:51.790286 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tzprv_575a3ee8-f538-4c96-8067-564def2cc3ff/extract-content/0.log" Dec 04 18:44:51 crc kubenswrapper[4631]: I1204 18:44:51.866935 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tzprv_575a3ee8-f538-4c96-8067-564def2cc3ff/extract-utilities/0.log" Dec 04 18:44:51 crc kubenswrapper[4631]: I1204 18:44:51.890813 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cfh7w_e28d58eb-8c1a-4e76-87fc-aefb35295f30/extract-utilities/0.log" Dec 04 18:44:51 crc kubenswrapper[4631]: I1204 18:44:51.997804 4631 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tzprv_575a3ee8-f538-4c96-8067-564def2cc3ff/registry-server/0.log" Dec 04 18:44:52 crc kubenswrapper[4631]: I1204 18:44:52.129096 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cfh7w_e28d58eb-8c1a-4e76-87fc-aefb35295f30/extract-content/0.log" Dec 04 18:44:52 crc kubenswrapper[4631]: I1204 18:44:52.133261 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cfh7w_e28d58eb-8c1a-4e76-87fc-aefb35295f30/extract-utilities/0.log" Dec 04 18:44:52 crc kubenswrapper[4631]: I1204 18:44:52.176862 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cfh7w_e28d58eb-8c1a-4e76-87fc-aefb35295f30/extract-content/0.log" Dec 04 18:44:52 crc kubenswrapper[4631]: I1204 18:44:52.240083 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:44:52 crc kubenswrapper[4631]: E1204 18:44:52.240329 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:44:52 crc kubenswrapper[4631]: I1204 18:44:52.338809 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cfh7w_e28d58eb-8c1a-4e76-87fc-aefb35295f30/extract-content/0.log" Dec 04 18:44:52 crc kubenswrapper[4631]: I1204 18:44:52.354952 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cfh7w_e28d58eb-8c1a-4e76-87fc-aefb35295f30/extract-utilities/0.log" Dec 04 18:44:52 crc kubenswrapper[4631]: I1204 18:44:52.949499 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cfh7w_e28d58eb-8c1a-4e76-87fc-aefb35295f30/registry-server/0.log" Dec 04 18:45:00 crc kubenswrapper[4631]: I1204 18:45:00.169342 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414565-76cpf"] Dec 04 18:45:00 crc kubenswrapper[4631]: E1204 18:45:00.170360 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd01e760-fb82-4bd1-a675-e2e176a534c2" containerName="container-00" Dec 04 18:45:00 crc kubenswrapper[4631]: I1204 18:45:00.170394 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd01e760-fb82-4bd1-a675-e2e176a534c2" containerName="container-00" Dec 04 18:45:00 crc kubenswrapper[4631]: I1204 18:45:00.170568 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd01e760-fb82-4bd1-a675-e2e176a534c2" containerName="container-00" Dec 04 18:45:00 crc kubenswrapper[4631]: I1204 18:45:00.171170 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414565-76cpf" Dec 04 18:45:00 crc kubenswrapper[4631]: I1204 18:45:00.174120 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 04 18:45:00 crc kubenswrapper[4631]: I1204 18:45:00.175980 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 04 18:45:00 crc kubenswrapper[4631]: I1204 18:45:00.180169 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414565-76cpf"] Dec 04 18:45:00 crc kubenswrapper[4631]: I1204 18:45:00.238745 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d6c790e5-e9f3-418c-ba96-1d4834fcd53e-secret-volume\") pod \"collect-profiles-29414565-76cpf\" (UID: \"d6c790e5-e9f3-418c-ba96-1d4834fcd53e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414565-76cpf" Dec 04 18:45:00 crc kubenswrapper[4631]: I1204 18:45:00.239496 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d6c790e5-e9f3-418c-ba96-1d4834fcd53e-config-volume\") pod \"collect-profiles-29414565-76cpf\" (UID: \"d6c790e5-e9f3-418c-ba96-1d4834fcd53e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414565-76cpf" Dec 04 18:45:00 crc kubenswrapper[4631]: I1204 18:45:00.239816 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcnxs\" (UniqueName: \"kubernetes.io/projected/d6c790e5-e9f3-418c-ba96-1d4834fcd53e-kube-api-access-dcnxs\") pod \"collect-profiles-29414565-76cpf\" (UID: \"d6c790e5-e9f3-418c-ba96-1d4834fcd53e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414565-76cpf" Dec 04 18:45:00 crc kubenswrapper[4631]: I1204 18:45:00.341933 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d6c790e5-e9f3-418c-ba96-1d4834fcd53e-secret-volume\") pod \"collect-profiles-29414565-76cpf\" (UID: \"d6c790e5-e9f3-418c-ba96-1d4834fcd53e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414565-76cpf" Dec 04 18:45:00 crc kubenswrapper[4631]: I1204 18:45:00.342059 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d6c790e5-e9f3-418c-ba96-1d4834fcd53e-config-volume\") pod \"collect-profiles-29414565-76cpf\" (UID: \"d6c790e5-e9f3-418c-ba96-1d4834fcd53e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414565-76cpf" Dec 04 18:45:00 crc kubenswrapper[4631]: I1204 18:45:00.342143 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcnxs\" (UniqueName: \"kubernetes.io/projected/d6c790e5-e9f3-418c-ba96-1d4834fcd53e-kube-api-access-dcnxs\") pod \"collect-profiles-29414565-76cpf\" (UID: \"d6c790e5-e9f3-418c-ba96-1d4834fcd53e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414565-76cpf" Dec 04 18:45:00 crc kubenswrapper[4631]: I1204 18:45:00.343040 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d6c790e5-e9f3-418c-ba96-1d4834fcd53e-config-volume\") pod 
\"collect-profiles-29414565-76cpf\" (UID: \"d6c790e5-e9f3-418c-ba96-1d4834fcd53e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414565-76cpf" Dec 04 18:45:00 crc kubenswrapper[4631]: I1204 18:45:00.350916 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d6c790e5-e9f3-418c-ba96-1d4834fcd53e-secret-volume\") pod \"collect-profiles-29414565-76cpf\" (UID: \"d6c790e5-e9f3-418c-ba96-1d4834fcd53e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414565-76cpf" Dec 04 18:45:00 crc kubenswrapper[4631]: I1204 18:45:00.362278 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcnxs\" (UniqueName: \"kubernetes.io/projected/d6c790e5-e9f3-418c-ba96-1d4834fcd53e-kube-api-access-dcnxs\") pod \"collect-profiles-29414565-76cpf\" (UID: \"d6c790e5-e9f3-418c-ba96-1d4834fcd53e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414565-76cpf" Dec 04 18:45:00 crc kubenswrapper[4631]: I1204 18:45:00.489750 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414565-76cpf" Dec 04 18:45:01 crc kubenswrapper[4631]: I1204 18:45:01.253005 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414565-76cpf"] Dec 04 18:45:01 crc kubenswrapper[4631]: I1204 18:45:01.733085 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414565-76cpf" event={"ID":"d6c790e5-e9f3-418c-ba96-1d4834fcd53e","Type":"ContainerStarted","Data":"d0bace26a80f7517d9c71de8586bed73077e64fa3b0ad34fe7a67b18eac8d552"} Dec 04 18:45:01 crc kubenswrapper[4631]: I1204 18:45:01.733425 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414565-76cpf" event={"ID":"d6c790e5-e9f3-418c-ba96-1d4834fcd53e","Type":"ContainerStarted","Data":"b857a8f61fb92f041f1b4fa28d83406d2badcae0f9aa9501641afccc956678df"} Dec 04 18:45:02 crc kubenswrapper[4631]: I1204 18:45:02.743480 4631 generic.go:334] "Generic (PLEG): container finished" podID="d6c790e5-e9f3-418c-ba96-1d4834fcd53e" containerID="d0bace26a80f7517d9c71de8586bed73077e64fa3b0ad34fe7a67b18eac8d552" exitCode=0 Dec 04 18:45:02 crc kubenswrapper[4631]: I1204 18:45:02.743595 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414565-76cpf" event={"ID":"d6c790e5-e9f3-418c-ba96-1d4834fcd53e","Type":"ContainerDied","Data":"d0bace26a80f7517d9c71de8586bed73077e64fa3b0ad34fe7a67b18eac8d552"} Dec 04 18:45:03 crc kubenswrapper[4631]: I1204 18:45:03.241173 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:45:03 crc kubenswrapper[4631]: E1204 18:45:03.241430 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:45:04 crc kubenswrapper[4631]: I1204 18:45:04.102731 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414565-76cpf" Dec 04 18:45:04 crc kubenswrapper[4631]: I1204 18:45:04.210239 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d6c790e5-e9f3-418c-ba96-1d4834fcd53e-secret-volume\") pod \"d6c790e5-e9f3-418c-ba96-1d4834fcd53e\" (UID: \"d6c790e5-e9f3-418c-ba96-1d4834fcd53e\") " Dec 04 18:45:04 crc kubenswrapper[4631]: I1204 18:45:04.210319 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d6c790e5-e9f3-418c-ba96-1d4834fcd53e-config-volume\") pod \"d6c790e5-e9f3-418c-ba96-1d4834fcd53e\" (UID: \"d6c790e5-e9f3-418c-ba96-1d4834fcd53e\") " Dec 04 18:45:04 crc kubenswrapper[4631]: I1204 18:45:04.210402 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dcnxs\" (UniqueName: \"kubernetes.io/projected/d6c790e5-e9f3-418c-ba96-1d4834fcd53e-kube-api-access-dcnxs\") pod \"d6c790e5-e9f3-418c-ba96-1d4834fcd53e\" (UID: \"d6c790e5-e9f3-418c-ba96-1d4834fcd53e\") " Dec 04 18:45:04 crc kubenswrapper[4631]: I1204 18:45:04.211053 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6c790e5-e9f3-418c-ba96-1d4834fcd53e-config-volume" (OuterVolumeSpecName: "config-volume") pod "d6c790e5-e9f3-418c-ba96-1d4834fcd53e" (UID: "d6c790e5-e9f3-418c-ba96-1d4834fcd53e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 18:45:04 crc kubenswrapper[4631]: I1204 18:45:04.215437 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6c790e5-e9f3-418c-ba96-1d4834fcd53e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d6c790e5-e9f3-418c-ba96-1d4834fcd53e" (UID: "d6c790e5-e9f3-418c-ba96-1d4834fcd53e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 18:45:04 crc kubenswrapper[4631]: I1204 18:45:04.228706 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6c790e5-e9f3-418c-ba96-1d4834fcd53e-kube-api-access-dcnxs" (OuterVolumeSpecName: "kube-api-access-dcnxs") pod "d6c790e5-e9f3-418c-ba96-1d4834fcd53e" (UID: "d6c790e5-e9f3-418c-ba96-1d4834fcd53e"). InnerVolumeSpecName "kube-api-access-dcnxs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:45:04 crc kubenswrapper[4631]: I1204 18:45:04.312477 4631 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d6c790e5-e9f3-418c-ba96-1d4834fcd53e-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 04 18:45:04 crc kubenswrapper[4631]: I1204 18:45:04.312504 4631 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d6c790e5-e9f3-418c-ba96-1d4834fcd53e-config-volume\") on node \"crc\" DevicePath \"\"" Dec 04 18:45:04 crc kubenswrapper[4631]: I1204 18:45:04.312514 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dcnxs\" (UniqueName: \"kubernetes.io/projected/d6c790e5-e9f3-418c-ba96-1d4834fcd53e-kube-api-access-dcnxs\") on node \"crc\" DevicePath \"\"" Dec 04 18:45:04 crc kubenswrapper[4631]: I1204 18:45:04.761529 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414565-76cpf" event={"ID":"d6c790e5-e9f3-418c-ba96-1d4834fcd53e","Type":"ContainerDied","Data":"b857a8f61fb92f041f1b4fa28d83406d2badcae0f9aa9501641afccc956678df"} Dec 04 18:45:04 crc kubenswrapper[4631]: I1204 18:45:04.761571 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b857a8f61fb92f041f1b4fa28d83406d2badcae0f9aa9501641afccc956678df" Dec 04 18:45:04 crc kubenswrapper[4631]: I1204 18:45:04.761624 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414565-76cpf" Dec 04 18:45:05 crc kubenswrapper[4631]: I1204 18:45:05.176399 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r"] Dec 04 18:45:05 crc kubenswrapper[4631]: I1204 18:45:05.188183 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414520-d8m4r"] Dec 04 18:45:06 crc kubenswrapper[4631]: I1204 18:45:06.249133 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c570a48-b033-455f-8a52-6169134b24ec" path="/var/lib/kubelet/pods/9c570a48-b033-455f-8a52-6169134b24ec/volumes" Dec 04 18:45:13 crc kubenswrapper[4631]: I1204 18:45:13.919801 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7mcqw"] Dec 04 18:45:13 crc kubenswrapper[4631]: E1204 18:45:13.920969 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6c790e5-e9f3-418c-ba96-1d4834fcd53e" containerName="collect-profiles" Dec 04 18:45:13 crc kubenswrapper[4631]: I1204 18:45:13.920983 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6c790e5-e9f3-418c-ba96-1d4834fcd53e" containerName="collect-profiles" Dec 04 18:45:13 crc kubenswrapper[4631]: I1204 18:45:13.921202 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6c790e5-e9f3-418c-ba96-1d4834fcd53e" containerName="collect-profiles" Dec 04 18:45:13 crc kubenswrapper[4631]: I1204 18:45:13.922522 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7mcqw" Dec 04 18:45:13 crc kubenswrapper[4631]: I1204 18:45:13.945364 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7mcqw"] Dec 04 18:45:13 crc kubenswrapper[4631]: I1204 18:45:13.980156 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/840817cd-9da0-41cd-84ad-fbce0c1b0013-catalog-content\") pod \"redhat-operators-7mcqw\" (UID: \"840817cd-9da0-41cd-84ad-fbce0c1b0013\") " pod="openshift-marketplace/redhat-operators-7mcqw" Dec 04 18:45:13 crc kubenswrapper[4631]: I1204 18:45:13.980215 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rc7f9\" (UniqueName: \"kubernetes.io/projected/840817cd-9da0-41cd-84ad-fbce0c1b0013-kube-api-access-rc7f9\") pod \"redhat-operators-7mcqw\" (UID: \"840817cd-9da0-41cd-84ad-fbce0c1b0013\") " pod="openshift-marketplace/redhat-operators-7mcqw" Dec 04 18:45:13 crc kubenswrapper[4631]: I1204 18:45:13.980421 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/840817cd-9da0-41cd-84ad-fbce0c1b0013-utilities\") pod \"redhat-operators-7mcqw\" (UID: \"840817cd-9da0-41cd-84ad-fbce0c1b0013\") " pod="openshift-marketplace/redhat-operators-7mcqw" Dec 04 18:45:14 crc kubenswrapper[4631]: I1204 18:45:14.082011 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/840817cd-9da0-41cd-84ad-fbce0c1b0013-utilities\") pod \"redhat-operators-7mcqw\" (UID: \"840817cd-9da0-41cd-84ad-fbce0c1b0013\") " pod="openshift-marketplace/redhat-operators-7mcqw" Dec 04 18:45:14 crc kubenswrapper[4631]: I1204 18:45:14.082085 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/840817cd-9da0-41cd-84ad-fbce0c1b0013-catalog-content\") pod \"redhat-operators-7mcqw\" (UID: \"840817cd-9da0-41cd-84ad-fbce0c1b0013\") " pod="openshift-marketplace/redhat-operators-7mcqw" Dec 04 18:45:14 crc kubenswrapper[4631]: I1204 18:45:14.082100 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rc7f9\" (UniqueName: \"kubernetes.io/projected/840817cd-9da0-41cd-84ad-fbce0c1b0013-kube-api-access-rc7f9\") pod \"redhat-operators-7mcqw\" (UID: \"840817cd-9da0-41cd-84ad-fbce0c1b0013\") " pod="openshift-marketplace/redhat-operators-7mcqw" Dec 04 18:45:14 crc kubenswrapper[4631]: I1204 18:45:14.082757 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/840817cd-9da0-41cd-84ad-fbce0c1b0013-catalog-content\") pod \"redhat-operators-7mcqw\" (UID: \"840817cd-9da0-41cd-84ad-fbce0c1b0013\") " pod="openshift-marketplace/redhat-operators-7mcqw" Dec 04 18:45:14 crc kubenswrapper[4631]: I1204 18:45:14.082849 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/840817cd-9da0-41cd-84ad-fbce0c1b0013-utilities\") pod \"redhat-operators-7mcqw\" (UID: \"840817cd-9da0-41cd-84ad-fbce0c1b0013\") " pod="openshift-marketplace/redhat-operators-7mcqw" Dec 04 18:45:14 crc kubenswrapper[4631]: I1204 18:45:14.110426 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-rc7f9\" (UniqueName: \"kubernetes.io/projected/840817cd-9da0-41cd-84ad-fbce0c1b0013-kube-api-access-rc7f9\") pod \"redhat-operators-7mcqw\" (UID: \"840817cd-9da0-41cd-84ad-fbce0c1b0013\") " pod="openshift-marketplace/redhat-operators-7mcqw" Dec 04 18:45:14 crc kubenswrapper[4631]: I1204 18:45:14.261801 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7mcqw" Dec 04 18:45:15 crc kubenswrapper[4631]: I1204 18:45:15.064214 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7mcqw"] Dec 04 18:45:15 crc kubenswrapper[4631]: I1204 18:45:15.853064 4631 generic.go:334] "Generic (PLEG): container finished" podID="840817cd-9da0-41cd-84ad-fbce0c1b0013" containerID="a88fab6e57a2ec6b989e99b8d1262b5281cb6109f69216497cf458bedc32632a" exitCode=0 Dec 04 18:45:15 crc kubenswrapper[4631]: I1204 18:45:15.853435 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7mcqw" event={"ID":"840817cd-9da0-41cd-84ad-fbce0c1b0013","Type":"ContainerDied","Data":"a88fab6e57a2ec6b989e99b8d1262b5281cb6109f69216497cf458bedc32632a"} Dec 04 18:45:15 crc kubenswrapper[4631]: I1204 18:45:15.853464 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7mcqw" event={"ID":"840817cd-9da0-41cd-84ad-fbce0c1b0013","Type":"ContainerStarted","Data":"ab6175640d57839a2fb2428dca380f442ce5d11095054f8aea0b9ced88e6c0fc"} Dec 04 18:45:16 crc kubenswrapper[4631]: I1204 18:45:16.239937 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:45:16 crc kubenswrapper[4631]: I1204 18:45:16.912580 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerStarted","Data":"05839d8d0766fa33b7eecf4a64f629e489a79c0a60120c9d4e2c2360bc76ae5f"} Dec 04 18:45:17 crc kubenswrapper[4631]: I1204 18:45:17.921903 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7mcqw" event={"ID":"840817cd-9da0-41cd-84ad-fbce0c1b0013","Type":"ContainerStarted","Data":"f5bd8e29c7215e2868f1800922ec6191605040c3edb0e873956cd1edfcb7c543"} Dec 04 18:45:24 crc kubenswrapper[4631]: I1204 18:45:24.006612 4631 generic.go:334] "Generic (PLEG): container finished" podID="840817cd-9da0-41cd-84ad-fbce0c1b0013" containerID="f5bd8e29c7215e2868f1800922ec6191605040c3edb0e873956cd1edfcb7c543" exitCode=0 Dec 04 18:45:24 crc kubenswrapper[4631]: I1204 18:45:24.006681 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7mcqw" event={"ID":"840817cd-9da0-41cd-84ad-fbce0c1b0013","Type":"ContainerDied","Data":"f5bd8e29c7215e2868f1800922ec6191605040c3edb0e873956cd1edfcb7c543"} Dec 04 18:45:24 crc kubenswrapper[4631]: I1204 18:45:24.010602 4631 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 18:45:24 crc kubenswrapper[4631]: I1204 18:45:24.408449 4631 scope.go:117] "RemoveContainer" containerID="047716b357ef2073183c2b173b4627e76c07d0ae0c2c8c045306360b7a6d36dd" Dec 04 18:45:24 crc kubenswrapper[4631]: I1204 18:45:24.448605 4631 scope.go:117] "RemoveContainer" containerID="920958e77b829db9ca620eaec30971eb113fab1daba2ea489208546434e9068c" Dec 04 18:45:24 crc kubenswrapper[4631]: I1204 18:45:24.488484 4631 scope.go:117] "RemoveContainer" 
containerID="1a7baeae3fed22d9d151417fece3a6b9d71476a5c381fd2178a94f662873d319" Dec 04 18:45:24 crc kubenswrapper[4631]: I1204 18:45:24.572755 4631 scope.go:117] "RemoveContainer" containerID="c50bb4790bde5e4bb7f682bed99276e8a9af8c2bf5094d1121f33fc919444c52" Dec 04 18:45:25 crc kubenswrapper[4631]: I1204 18:45:25.017350 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7mcqw" event={"ID":"840817cd-9da0-41cd-84ad-fbce0c1b0013","Type":"ContainerStarted","Data":"b26225c492aa13a6b78e386f4626bb1a9b46b4b390df360368829d1c6b9ea226"} Dec 04 18:45:25 crc kubenswrapper[4631]: I1204 18:45:25.037745 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7mcqw" podStartSLOduration=3.466646258 podStartE2EDuration="12.037725633s" podCreationTimestamp="2025-12-04 18:45:13 +0000 UTC" firstStartedPulling="2025-12-04 18:45:15.877511475 +0000 UTC m=+4645.909753473" lastFinishedPulling="2025-12-04 18:45:24.44859085 +0000 UTC m=+4654.480832848" observedRunningTime="2025-12-04 18:45:25.035910781 +0000 UTC m=+4655.068152789" watchObservedRunningTime="2025-12-04 18:45:25.037725633 +0000 UTC m=+4655.069967631" Dec 04 18:45:26 crc kubenswrapper[4631]: E1204 18:45:26.012683 4631 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.194:44706->38.102.83.194:39691: write tcp 38.102.83.194:44706->38.102.83.194:39691: write: broken pipe Dec 04 18:45:34 crc kubenswrapper[4631]: I1204 18:45:34.262406 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7mcqw" Dec 04 18:45:34 crc kubenswrapper[4631]: I1204 18:45:34.262936 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7mcqw" Dec 04 18:45:35 crc kubenswrapper[4631]: I1204 18:45:35.309053 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-7mcqw" podUID="840817cd-9da0-41cd-84ad-fbce0c1b0013" containerName="registry-server" probeResult="failure" output=< Dec 04 18:45:35 crc kubenswrapper[4631]: timeout: failed to connect service ":50051" within 1s Dec 04 18:45:35 crc kubenswrapper[4631]: > Dec 04 18:45:37 crc kubenswrapper[4631]: I1204 18:45:37.689407 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ddr82"] Dec 04 18:45:37 crc kubenswrapper[4631]: I1204 18:45:37.692697 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ddr82" Dec 04 18:45:37 crc kubenswrapper[4631]: I1204 18:45:37.716672 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ddr82"] Dec 04 18:45:37 crc kubenswrapper[4631]: I1204 18:45:37.767048 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dfb0bfa-a848-445f-b310-35e6aed71b3f-utilities\") pod \"certified-operators-ddr82\" (UID: \"0dfb0bfa-a848-445f-b310-35e6aed71b3f\") " pod="openshift-marketplace/certified-operators-ddr82" Dec 04 18:45:37 crc kubenswrapper[4631]: I1204 18:45:37.767150 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gr2b\" (UniqueName: \"kubernetes.io/projected/0dfb0bfa-a848-445f-b310-35e6aed71b3f-kube-api-access-8gr2b\") pod \"certified-operators-ddr82\" (UID: \"0dfb0bfa-a848-445f-b310-35e6aed71b3f\") " pod="openshift-marketplace/certified-operators-ddr82" Dec 04 18:45:37 crc kubenswrapper[4631]: I1204 18:45:37.767249 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dfb0bfa-a848-445f-b310-35e6aed71b3f-catalog-content\") pod \"certified-operators-ddr82\" (UID: \"0dfb0bfa-a848-445f-b310-35e6aed71b3f\") " pod="openshift-marketplace/certified-operators-ddr82" Dec 04 18:45:37 crc kubenswrapper[4631]: I1204 18:45:37.868382 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gr2b\" (UniqueName: \"kubernetes.io/projected/0dfb0bfa-a848-445f-b310-35e6aed71b3f-kube-api-access-8gr2b\") pod \"certified-operators-ddr82\" (UID: \"0dfb0bfa-a848-445f-b310-35e6aed71b3f\") " pod="openshift-marketplace/certified-operators-ddr82" Dec 04 18:45:37 crc kubenswrapper[4631]: I1204 18:45:37.868471 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dfb0bfa-a848-445f-b310-35e6aed71b3f-catalog-content\") pod \"certified-operators-ddr82\" (UID: \"0dfb0bfa-a848-445f-b310-35e6aed71b3f\") " pod="openshift-marketplace/certified-operators-ddr82" Dec 04 18:45:37 crc kubenswrapper[4631]: I1204 18:45:37.868554 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dfb0bfa-a848-445f-b310-35e6aed71b3f-utilities\") pod \"certified-operators-ddr82\" (UID: \"0dfb0bfa-a848-445f-b310-35e6aed71b3f\") " pod="openshift-marketplace/certified-operators-ddr82" Dec 04 18:45:37 crc kubenswrapper[4631]: I1204 18:45:37.869041 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dfb0bfa-a848-445f-b310-35e6aed71b3f-utilities\") pod \"certified-operators-ddr82\" (UID: \"0dfb0bfa-a848-445f-b310-35e6aed71b3f\") " pod="openshift-marketplace/certified-operators-ddr82" Dec 04 18:45:37 crc kubenswrapper[4631]: I1204 18:45:37.869101 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dfb0bfa-a848-445f-b310-35e6aed71b3f-catalog-content\") pod \"certified-operators-ddr82\" (UID: \"0dfb0bfa-a848-445f-b310-35e6aed71b3f\") " pod="openshift-marketplace/certified-operators-ddr82" Dec 04 18:45:37 crc kubenswrapper[4631]: I1204 18:45:37.895328 4631 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-8gr2b\" (UniqueName: \"kubernetes.io/projected/0dfb0bfa-a848-445f-b310-35e6aed71b3f-kube-api-access-8gr2b\") pod \"certified-operators-ddr82\" (UID: \"0dfb0bfa-a848-445f-b310-35e6aed71b3f\") " pod="openshift-marketplace/certified-operators-ddr82" Dec 04 18:45:38 crc kubenswrapper[4631]: I1204 18:45:38.010363 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ddr82" Dec 04 18:45:38 crc kubenswrapper[4631]: I1204 18:45:38.612017 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ddr82"] Dec 04 18:45:39 crc kubenswrapper[4631]: I1204 18:45:39.162103 4631 generic.go:334] "Generic (PLEG): container finished" podID="0dfb0bfa-a848-445f-b310-35e6aed71b3f" containerID="790b74de86ef10d36dffc64a0d131df0e04ee1c8ad7d0faadf9813ff34234396" exitCode=0 Dec 04 18:45:39 crc kubenswrapper[4631]: I1204 18:45:39.162191 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ddr82" event={"ID":"0dfb0bfa-a848-445f-b310-35e6aed71b3f","Type":"ContainerDied","Data":"790b74de86ef10d36dffc64a0d131df0e04ee1c8ad7d0faadf9813ff34234396"} Dec 04 18:45:39 crc kubenswrapper[4631]: I1204 18:45:39.162481 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ddr82" event={"ID":"0dfb0bfa-a848-445f-b310-35e6aed71b3f","Type":"ContainerStarted","Data":"a9b9de908696d16057377613e8c11e88681b5d88118efe8413ce887abc25e271"} Dec 04 18:45:40 crc kubenswrapper[4631]: I1204 18:45:40.172848 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ddr82" event={"ID":"0dfb0bfa-a848-445f-b310-35e6aed71b3f","Type":"ContainerStarted","Data":"01441c34dab43e96f85f7183da668ddf46b376408d02816650edaa72c5328ce6"} Dec 04 18:45:41 crc kubenswrapper[4631]: I1204 18:45:41.183892 4631 generic.go:334] "Generic (PLEG): container finished" podID="0dfb0bfa-a848-445f-b310-35e6aed71b3f" containerID="01441c34dab43e96f85f7183da668ddf46b376408d02816650edaa72c5328ce6" exitCode=0 Dec 04 18:45:41 crc kubenswrapper[4631]: I1204 18:45:41.184006 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ddr82" event={"ID":"0dfb0bfa-a848-445f-b310-35e6aed71b3f","Type":"ContainerDied","Data":"01441c34dab43e96f85f7183da668ddf46b376408d02816650edaa72c5328ce6"} Dec 04 18:45:42 crc kubenswrapper[4631]: I1204 18:45:42.199519 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ddr82" event={"ID":"0dfb0bfa-a848-445f-b310-35e6aed71b3f","Type":"ContainerStarted","Data":"91c04c6672c4500aed77479364d30d0961f75464b8e1b5753398334b28fc8f34"} Dec 04 18:45:42 crc kubenswrapper[4631]: I1204 18:45:42.223638 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ddr82" podStartSLOduration=2.805626824 podStartE2EDuration="5.223624301s" podCreationTimestamp="2025-12-04 18:45:37 +0000 UTC" firstStartedPulling="2025-12-04 18:45:39.166343441 +0000 UTC m=+4669.198585429" lastFinishedPulling="2025-12-04 18:45:41.584340908 +0000 UTC m=+4671.616582906" observedRunningTime="2025-12-04 18:45:42.221406997 +0000 UTC m=+4672.253648995" watchObservedRunningTime="2025-12-04 18:45:42.223624301 +0000 UTC m=+4672.255866299" Dec 04 18:45:44 crc kubenswrapper[4631]: I1204 18:45:44.317316 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openshift-marketplace/redhat-operators-7mcqw" Dec 04 18:45:44 crc kubenswrapper[4631]: I1204 18:45:44.392575 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7mcqw" Dec 04 18:45:45 crc kubenswrapper[4631]: I1204 18:45:45.070988 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7mcqw"] Dec 04 18:45:46 crc kubenswrapper[4631]: I1204 18:45:46.234759 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7mcqw" podUID="840817cd-9da0-41cd-84ad-fbce0c1b0013" containerName="registry-server" containerID="cri-o://b26225c492aa13a6b78e386f4626bb1a9b46b4b390df360368829d1c6b9ea226" gracePeriod=2 Dec 04 18:45:47 crc kubenswrapper[4631]: I1204 18:45:47.253674 4631 generic.go:334] "Generic (PLEG): container finished" podID="840817cd-9da0-41cd-84ad-fbce0c1b0013" containerID="b26225c492aa13a6b78e386f4626bb1a9b46b4b390df360368829d1c6b9ea226" exitCode=0 Dec 04 18:45:47 crc kubenswrapper[4631]: I1204 18:45:47.253852 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7mcqw" event={"ID":"840817cd-9da0-41cd-84ad-fbce0c1b0013","Type":"ContainerDied","Data":"b26225c492aa13a6b78e386f4626bb1a9b46b4b390df360368829d1c6b9ea226"} Dec 04 18:45:47 crc kubenswrapper[4631]: I1204 18:45:47.255168 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7mcqw" event={"ID":"840817cd-9da0-41cd-84ad-fbce0c1b0013","Type":"ContainerDied","Data":"ab6175640d57839a2fb2428dca380f442ce5d11095054f8aea0b9ced88e6c0fc"} Dec 04 18:45:47 crc kubenswrapper[4631]: I1204 18:45:47.255304 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab6175640d57839a2fb2428dca380f442ce5d11095054f8aea0b9ced88e6c0fc" Dec 04 18:45:47 crc kubenswrapper[4631]: I1204 18:45:47.377543 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7mcqw" Dec 04 18:45:47 crc kubenswrapper[4631]: I1204 18:45:47.471336 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rc7f9\" (UniqueName: \"kubernetes.io/projected/840817cd-9da0-41cd-84ad-fbce0c1b0013-kube-api-access-rc7f9\") pod \"840817cd-9da0-41cd-84ad-fbce0c1b0013\" (UID: \"840817cd-9da0-41cd-84ad-fbce0c1b0013\") " Dec 04 18:45:47 crc kubenswrapper[4631]: I1204 18:45:47.471601 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/840817cd-9da0-41cd-84ad-fbce0c1b0013-utilities\") pod \"840817cd-9da0-41cd-84ad-fbce0c1b0013\" (UID: \"840817cd-9da0-41cd-84ad-fbce0c1b0013\") " Dec 04 18:45:47 crc kubenswrapper[4631]: I1204 18:45:47.471629 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/840817cd-9da0-41cd-84ad-fbce0c1b0013-catalog-content\") pod \"840817cd-9da0-41cd-84ad-fbce0c1b0013\" (UID: \"840817cd-9da0-41cd-84ad-fbce0c1b0013\") " Dec 04 18:45:47 crc kubenswrapper[4631]: I1204 18:45:47.473316 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/840817cd-9da0-41cd-84ad-fbce0c1b0013-utilities" (OuterVolumeSpecName: "utilities") pod "840817cd-9da0-41cd-84ad-fbce0c1b0013" (UID: "840817cd-9da0-41cd-84ad-fbce0c1b0013"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:45:47 crc kubenswrapper[4631]: I1204 18:45:47.487951 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/840817cd-9da0-41cd-84ad-fbce0c1b0013-kube-api-access-rc7f9" (OuterVolumeSpecName: "kube-api-access-rc7f9") pod "840817cd-9da0-41cd-84ad-fbce0c1b0013" (UID: "840817cd-9da0-41cd-84ad-fbce0c1b0013"). InnerVolumeSpecName "kube-api-access-rc7f9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:45:47 crc kubenswrapper[4631]: I1204 18:45:47.574907 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rc7f9\" (UniqueName: \"kubernetes.io/projected/840817cd-9da0-41cd-84ad-fbce0c1b0013-kube-api-access-rc7f9\") on node \"crc\" DevicePath \"\"" Dec 04 18:45:47 crc kubenswrapper[4631]: I1204 18:45:47.574948 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/840817cd-9da0-41cd-84ad-fbce0c1b0013-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 18:45:47 crc kubenswrapper[4631]: I1204 18:45:47.633644 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/840817cd-9da0-41cd-84ad-fbce0c1b0013-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "840817cd-9da0-41cd-84ad-fbce0c1b0013" (UID: "840817cd-9da0-41cd-84ad-fbce0c1b0013"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:45:47 crc kubenswrapper[4631]: I1204 18:45:47.676809 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/840817cd-9da0-41cd-84ad-fbce0c1b0013-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 18:45:48 crc kubenswrapper[4631]: I1204 18:45:48.011024 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ddr82" Dec 04 18:45:48 crc kubenswrapper[4631]: I1204 18:45:48.011703 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ddr82" Dec 04 18:45:48 crc kubenswrapper[4631]: I1204 18:45:48.090295 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ddr82" Dec 04 18:45:48 crc kubenswrapper[4631]: I1204 18:45:48.268462 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7mcqw" Dec 04 18:45:48 crc kubenswrapper[4631]: I1204 18:45:48.303106 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7mcqw"] Dec 04 18:45:48 crc kubenswrapper[4631]: I1204 18:45:48.315059 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7mcqw"] Dec 04 18:45:48 crc kubenswrapper[4631]: I1204 18:45:48.334190 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ddr82" Dec 04 18:45:50 crc kubenswrapper[4631]: I1204 18:45:50.282695 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="840817cd-9da0-41cd-84ad-fbce0c1b0013" path="/var/lib/kubelet/pods/840817cd-9da0-41cd-84ad-fbce0c1b0013/volumes" Dec 04 18:45:50 crc kubenswrapper[4631]: I1204 18:45:50.470422 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ddr82"] Dec 04 18:45:50 crc kubenswrapper[4631]: I1204 18:45:50.470647 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ddr82" podUID="0dfb0bfa-a848-445f-b310-35e6aed71b3f" containerName="registry-server" containerID="cri-o://91c04c6672c4500aed77479364d30d0961f75464b8e1b5753398334b28fc8f34" gracePeriod=2 Dec 04 18:45:51 crc kubenswrapper[4631]: I1204 18:45:51.301341 4631 generic.go:334] "Generic (PLEG): container finished" podID="0dfb0bfa-a848-445f-b310-35e6aed71b3f" containerID="91c04c6672c4500aed77479364d30d0961f75464b8e1b5753398334b28fc8f34" exitCode=0 Dec 04 18:45:51 crc kubenswrapper[4631]: I1204 18:45:51.301557 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ddr82" event={"ID":"0dfb0bfa-a848-445f-b310-35e6aed71b3f","Type":"ContainerDied","Data":"91c04c6672c4500aed77479364d30d0961f75464b8e1b5753398334b28fc8f34"} Dec 04 18:45:51 crc kubenswrapper[4631]: I1204 18:45:51.807349 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ddr82" Dec 04 18:45:51 crc kubenswrapper[4631]: I1204 18:45:51.854157 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dfb0bfa-a848-445f-b310-35e6aed71b3f-utilities\") pod \"0dfb0bfa-a848-445f-b310-35e6aed71b3f\" (UID: \"0dfb0bfa-a848-445f-b310-35e6aed71b3f\") " Dec 04 18:45:51 crc kubenswrapper[4631]: I1204 18:45:51.854282 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8gr2b\" (UniqueName: \"kubernetes.io/projected/0dfb0bfa-a848-445f-b310-35e6aed71b3f-kube-api-access-8gr2b\") pod \"0dfb0bfa-a848-445f-b310-35e6aed71b3f\" (UID: \"0dfb0bfa-a848-445f-b310-35e6aed71b3f\") " Dec 04 18:45:51 crc kubenswrapper[4631]: I1204 18:45:51.854305 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dfb0bfa-a848-445f-b310-35e6aed71b3f-catalog-content\") pod \"0dfb0bfa-a848-445f-b310-35e6aed71b3f\" (UID: \"0dfb0bfa-a848-445f-b310-35e6aed71b3f\") " Dec 04 18:45:51 crc kubenswrapper[4631]: I1204 18:45:51.855360 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0dfb0bfa-a848-445f-b310-35e6aed71b3f-utilities" (OuterVolumeSpecName: "utilities") pod "0dfb0bfa-a848-445f-b310-35e6aed71b3f" (UID: "0dfb0bfa-a848-445f-b310-35e6aed71b3f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:45:51 crc kubenswrapper[4631]: I1204 18:45:51.866953 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0dfb0bfa-a848-445f-b310-35e6aed71b3f-kube-api-access-8gr2b" (OuterVolumeSpecName: "kube-api-access-8gr2b") pod "0dfb0bfa-a848-445f-b310-35e6aed71b3f" (UID: "0dfb0bfa-a848-445f-b310-35e6aed71b3f"). InnerVolumeSpecName "kube-api-access-8gr2b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:45:51 crc kubenswrapper[4631]: I1204 18:45:51.921532 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0dfb0bfa-a848-445f-b310-35e6aed71b3f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0dfb0bfa-a848-445f-b310-35e6aed71b3f" (UID: "0dfb0bfa-a848-445f-b310-35e6aed71b3f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:45:51 crc kubenswrapper[4631]: I1204 18:45:51.956001 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dfb0bfa-a848-445f-b310-35e6aed71b3f-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 18:45:51 crc kubenswrapper[4631]: I1204 18:45:51.956036 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8gr2b\" (UniqueName: \"kubernetes.io/projected/0dfb0bfa-a848-445f-b310-35e6aed71b3f-kube-api-access-8gr2b\") on node \"crc\" DevicePath \"\"" Dec 04 18:45:51 crc kubenswrapper[4631]: I1204 18:45:51.956052 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dfb0bfa-a848-445f-b310-35e6aed71b3f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 18:45:52 crc kubenswrapper[4631]: I1204 18:45:52.314799 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ddr82" event={"ID":"0dfb0bfa-a848-445f-b310-35e6aed71b3f","Type":"ContainerDied","Data":"a9b9de908696d16057377613e8c11e88681b5d88118efe8413ce887abc25e271"} Dec 04 18:45:52 crc kubenswrapper[4631]: I1204 18:45:52.314879 4631 scope.go:117] "RemoveContainer" containerID="91c04c6672c4500aed77479364d30d0961f75464b8e1b5753398334b28fc8f34" Dec 04 18:45:52 crc kubenswrapper[4631]: I1204 18:45:52.315135 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ddr82" Dec 04 18:45:52 crc kubenswrapper[4631]: I1204 18:45:52.352447 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ddr82"] Dec 04 18:45:52 crc kubenswrapper[4631]: I1204 18:45:52.352829 4631 scope.go:117] "RemoveContainer" containerID="01441c34dab43e96f85f7183da668ddf46b376408d02816650edaa72c5328ce6" Dec 04 18:45:52 crc kubenswrapper[4631]: I1204 18:45:52.363796 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ddr82"] Dec 04 18:45:52 crc kubenswrapper[4631]: I1204 18:45:52.379056 4631 scope.go:117] "RemoveContainer" containerID="790b74de86ef10d36dffc64a0d131df0e04ee1c8ad7d0faadf9813ff34234396" Dec 04 18:45:54 crc kubenswrapper[4631]: I1204 18:45:54.260434 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0dfb0bfa-a848-445f-b310-35e6aed71b3f" path="/var/lib/kubelet/pods/0dfb0bfa-a848-445f-b310-35e6aed71b3f/volumes" Dec 04 18:47:15 crc kubenswrapper[4631]: I1204 18:47:15.145615 4631 generic.go:334] "Generic (PLEG): container finished" podID="610e5420-ccf7-4deb-a457-c8fe0a7f3e0c" containerID="f35bc398fc7bdc912e75d0398e79cb32d29aa6d7cb2c800a96149f18b8dc32e6" exitCode=0 Dec 04 18:47:15 crc kubenswrapper[4631]: I1204 18:47:15.145785 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fmgjn/must-gather-86p5p" event={"ID":"610e5420-ccf7-4deb-a457-c8fe0a7f3e0c","Type":"ContainerDied","Data":"f35bc398fc7bdc912e75d0398e79cb32d29aa6d7cb2c800a96149f18b8dc32e6"} Dec 04 18:47:15 crc kubenswrapper[4631]: I1204 18:47:15.147671 4631 scope.go:117] "RemoveContainer" containerID="f35bc398fc7bdc912e75d0398e79cb32d29aa6d7cb2c800a96149f18b8dc32e6" Dec 04 18:47:15 crc kubenswrapper[4631]: I1204 18:47:15.533158 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-fmgjn_must-gather-86p5p_610e5420-ccf7-4deb-a457-c8fe0a7f3e0c/gather/0.log" Dec 04 18:47:24 crc kubenswrapper[4631]: I1204 
18:47:24.521974 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-fmgjn/must-gather-86p5p"] Dec 04 18:47:24 crc kubenswrapper[4631]: I1204 18:47:24.523788 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-fmgjn/must-gather-86p5p" podUID="610e5420-ccf7-4deb-a457-c8fe0a7f3e0c" containerName="copy" containerID="cri-o://95c90911e38677c65e405379a8777221c1ff4ba1dc300ff57c9c7e6bb6f60111" gracePeriod=2 Dec 04 18:47:24 crc kubenswrapper[4631]: I1204 18:47:24.538876 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-fmgjn/must-gather-86p5p"] Dec 04 18:47:25 crc kubenswrapper[4631]: I1204 18:47:25.000762 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-fmgjn_must-gather-86p5p_610e5420-ccf7-4deb-a457-c8fe0a7f3e0c/copy/0.log" Dec 04 18:47:25 crc kubenswrapper[4631]: I1204 18:47:25.001476 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-fmgjn/must-gather-86p5p" Dec 04 18:47:25 crc kubenswrapper[4631]: I1204 18:47:25.164856 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-798ck\" (UniqueName: \"kubernetes.io/projected/610e5420-ccf7-4deb-a457-c8fe0a7f3e0c-kube-api-access-798ck\") pod \"610e5420-ccf7-4deb-a457-c8fe0a7f3e0c\" (UID: \"610e5420-ccf7-4deb-a457-c8fe0a7f3e0c\") " Dec 04 18:47:25 crc kubenswrapper[4631]: I1204 18:47:25.165069 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/610e5420-ccf7-4deb-a457-c8fe0a7f3e0c-must-gather-output\") pod \"610e5420-ccf7-4deb-a457-c8fe0a7f3e0c\" (UID: \"610e5420-ccf7-4deb-a457-c8fe0a7f3e0c\") " Dec 04 18:47:25 crc kubenswrapper[4631]: I1204 18:47:25.192680 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/610e5420-ccf7-4deb-a457-c8fe0a7f3e0c-kube-api-access-798ck" (OuterVolumeSpecName: "kube-api-access-798ck") pod "610e5420-ccf7-4deb-a457-c8fe0a7f3e0c" (UID: "610e5420-ccf7-4deb-a457-c8fe0a7f3e0c"). InnerVolumeSpecName "kube-api-access-798ck". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:47:25 crc kubenswrapper[4631]: I1204 18:47:25.251544 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-fmgjn_must-gather-86p5p_610e5420-ccf7-4deb-a457-c8fe0a7f3e0c/copy/0.log" Dec 04 18:47:25 crc kubenswrapper[4631]: I1204 18:47:25.251884 4631 generic.go:334] "Generic (PLEG): container finished" podID="610e5420-ccf7-4deb-a457-c8fe0a7f3e0c" containerID="95c90911e38677c65e405379a8777221c1ff4ba1dc300ff57c9c7e6bb6f60111" exitCode=143 Dec 04 18:47:25 crc kubenswrapper[4631]: I1204 18:47:25.251933 4631 scope.go:117] "RemoveContainer" containerID="95c90911e38677c65e405379a8777221c1ff4ba1dc300ff57c9c7e6bb6f60111" Dec 04 18:47:25 crc kubenswrapper[4631]: I1204 18:47:25.252045 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-fmgjn/must-gather-86p5p" Dec 04 18:47:25 crc kubenswrapper[4631]: I1204 18:47:25.267576 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-798ck\" (UniqueName: \"kubernetes.io/projected/610e5420-ccf7-4deb-a457-c8fe0a7f3e0c-kube-api-access-798ck\") on node \"crc\" DevicePath \"\"" Dec 04 18:47:25 crc kubenswrapper[4631]: I1204 18:47:25.276238 4631 scope.go:117] "RemoveContainer" containerID="f35bc398fc7bdc912e75d0398e79cb32d29aa6d7cb2c800a96149f18b8dc32e6" Dec 04 18:47:25 crc kubenswrapper[4631]: I1204 18:47:25.334912 4631 scope.go:117] "RemoveContainer" containerID="95c90911e38677c65e405379a8777221c1ff4ba1dc300ff57c9c7e6bb6f60111" Dec 04 18:47:25 crc kubenswrapper[4631]: E1204 18:47:25.339645 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95c90911e38677c65e405379a8777221c1ff4ba1dc300ff57c9c7e6bb6f60111\": container with ID starting with 95c90911e38677c65e405379a8777221c1ff4ba1dc300ff57c9c7e6bb6f60111 not found: ID does not exist" containerID="95c90911e38677c65e405379a8777221c1ff4ba1dc300ff57c9c7e6bb6f60111" Dec 04 18:47:25 crc kubenswrapper[4631]: I1204 18:47:25.339676 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95c90911e38677c65e405379a8777221c1ff4ba1dc300ff57c9c7e6bb6f60111"} err="failed to get container status \"95c90911e38677c65e405379a8777221c1ff4ba1dc300ff57c9c7e6bb6f60111\": rpc error: code = NotFound desc = could not find container \"95c90911e38677c65e405379a8777221c1ff4ba1dc300ff57c9c7e6bb6f60111\": container with ID starting with 95c90911e38677c65e405379a8777221c1ff4ba1dc300ff57c9c7e6bb6f60111 not found: ID does not exist" Dec 04 18:47:25 crc kubenswrapper[4631]: I1204 18:47:25.339697 4631 scope.go:117] "RemoveContainer" containerID="f35bc398fc7bdc912e75d0398e79cb32d29aa6d7cb2c800a96149f18b8dc32e6" Dec 04 18:47:25 crc kubenswrapper[4631]: E1204 18:47:25.341265 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f35bc398fc7bdc912e75d0398e79cb32d29aa6d7cb2c800a96149f18b8dc32e6\": container with ID starting with f35bc398fc7bdc912e75d0398e79cb32d29aa6d7cb2c800a96149f18b8dc32e6 not found: ID does not exist" containerID="f35bc398fc7bdc912e75d0398e79cb32d29aa6d7cb2c800a96149f18b8dc32e6" Dec 04 18:47:25 crc kubenswrapper[4631]: I1204 18:47:25.341292 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f35bc398fc7bdc912e75d0398e79cb32d29aa6d7cb2c800a96149f18b8dc32e6"} err="failed to get container status \"f35bc398fc7bdc912e75d0398e79cb32d29aa6d7cb2c800a96149f18b8dc32e6\": rpc error: code = NotFound desc = could not find container \"f35bc398fc7bdc912e75d0398e79cb32d29aa6d7cb2c800a96149f18b8dc32e6\": container with ID starting with f35bc398fc7bdc912e75d0398e79cb32d29aa6d7cb2c800a96149f18b8dc32e6 not found: ID does not exist" Dec 04 18:47:25 crc kubenswrapper[4631]: I1204 18:47:25.405120 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/610e5420-ccf7-4deb-a457-c8fe0a7f3e0c-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "610e5420-ccf7-4deb-a457-c8fe0a7f3e0c" (UID: "610e5420-ccf7-4deb-a457-c8fe0a7f3e0c"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:47:25 crc kubenswrapper[4631]: I1204 18:47:25.471512 4631 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/610e5420-ccf7-4deb-a457-c8fe0a7f3e0c-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 04 18:47:26 crc kubenswrapper[4631]: I1204 18:47:26.250700 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="610e5420-ccf7-4deb-a457-c8fe0a7f3e0c" path="/var/lib/kubelet/pods/610e5420-ccf7-4deb-a457-c8fe0a7f3e0c/volumes" Dec 04 18:47:36 crc kubenswrapper[4631]: I1204 18:47:36.023969 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:47:36 crc kubenswrapper[4631]: I1204 18:47:36.024589 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 18:48:06 crc kubenswrapper[4631]: I1204 18:48:06.023554 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:48:06 crc kubenswrapper[4631]: I1204 18:48:06.024474 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 18:48:36 crc kubenswrapper[4631]: I1204 18:48:36.022583 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:48:36 crc kubenswrapper[4631]: I1204 18:48:36.023093 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 18:48:36 crc kubenswrapper[4631]: I1204 18:48:36.023165 4631 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 18:48:36 crc kubenswrapper[4631]: I1204 18:48:36.024190 4631 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"05839d8d0766fa33b7eecf4a64f629e489a79c0a60120c9d4e2c2360bc76ae5f"} pod="openshift-machine-config-operator/machine-config-daemon-q27wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 18:48:36 crc kubenswrapper[4631]: I1204 18:48:36.024282 4631 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" containerID="cri-o://05839d8d0766fa33b7eecf4a64f629e489a79c0a60120c9d4e2c2360bc76ae5f" gracePeriod=600 Dec 04 18:48:36 crc kubenswrapper[4631]: I1204 18:48:36.935707 4631 generic.go:334] "Generic (PLEG): container finished" podID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerID="05839d8d0766fa33b7eecf4a64f629e489a79c0a60120c9d4e2c2360bc76ae5f" exitCode=0 Dec 04 18:48:36 crc kubenswrapper[4631]: I1204 18:48:36.935808 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerDied","Data":"05839d8d0766fa33b7eecf4a64f629e489a79c0a60120c9d4e2c2360bc76ae5f"} Dec 04 18:48:36 crc kubenswrapper[4631]: I1204 18:48:36.936284 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerStarted","Data":"05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149"} Dec 04 18:48:36 crc kubenswrapper[4631]: I1204 18:48:36.936309 4631 scope.go:117] "RemoveContainer" containerID="f68d39164b8bcdad11c6a06a4ace7279807e911e011bd61e86e08c16adba49ef" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.707527 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xqqhh"] Dec 04 18:49:24 crc kubenswrapper[4631]: E1204 18:49:24.709613 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dfb0bfa-a848-445f-b310-35e6aed71b3f" containerName="extract-utilities" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.709706 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dfb0bfa-a848-445f-b310-35e6aed71b3f" containerName="extract-utilities" Dec 04 18:49:24 crc kubenswrapper[4631]: E1204 18:49:24.709785 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="610e5420-ccf7-4deb-a457-c8fe0a7f3e0c" containerName="copy" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.709873 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="610e5420-ccf7-4deb-a457-c8fe0a7f3e0c" containerName="copy" Dec 04 18:49:24 crc kubenswrapper[4631]: E1204 18:49:24.709950 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="610e5420-ccf7-4deb-a457-c8fe0a7f3e0c" containerName="gather" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.710017 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="610e5420-ccf7-4deb-a457-c8fe0a7f3e0c" containerName="gather" Dec 04 18:49:24 crc kubenswrapper[4631]: E1204 18:49:24.710112 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="840817cd-9da0-41cd-84ad-fbce0c1b0013" containerName="registry-server" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.710173 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="840817cd-9da0-41cd-84ad-fbce0c1b0013" containerName="registry-server" Dec 04 18:49:24 crc kubenswrapper[4631]: E1204 18:49:24.710235 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dfb0bfa-a848-445f-b310-35e6aed71b3f" containerName="extract-content" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.710299 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dfb0bfa-a848-445f-b310-35e6aed71b3f" containerName="extract-content" Dec 
04 18:49:24 crc kubenswrapper[4631]: E1204 18:49:24.710361 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="840817cd-9da0-41cd-84ad-fbce0c1b0013" containerName="extract-content" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.710445 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="840817cd-9da0-41cd-84ad-fbce0c1b0013" containerName="extract-content" Dec 04 18:49:24 crc kubenswrapper[4631]: E1204 18:49:24.710530 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dfb0bfa-a848-445f-b310-35e6aed71b3f" containerName="registry-server" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.710595 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dfb0bfa-a848-445f-b310-35e6aed71b3f" containerName="registry-server" Dec 04 18:49:24 crc kubenswrapper[4631]: E1204 18:49:24.710660 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="840817cd-9da0-41cd-84ad-fbce0c1b0013" containerName="extract-utilities" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.710722 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="840817cd-9da0-41cd-84ad-fbce0c1b0013" containerName="extract-utilities" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.710967 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="610e5420-ccf7-4deb-a457-c8fe0a7f3e0c" containerName="gather" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.711037 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="840817cd-9da0-41cd-84ad-fbce0c1b0013" containerName="registry-server" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.711111 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="610e5420-ccf7-4deb-a457-c8fe0a7f3e0c" containerName="copy" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.711178 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="0dfb0bfa-a848-445f-b310-35e6aed71b3f" containerName="registry-server" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.712611 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xqqhh" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.720695 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xqqhh"] Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.760131 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37ef6884-efe8-4407-b22a-44d08a285acd-catalog-content\") pod \"redhat-marketplace-xqqhh\" (UID: \"37ef6884-efe8-4407-b22a-44d08a285acd\") " pod="openshift-marketplace/redhat-marketplace-xqqhh" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.760318 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpskf\" (UniqueName: \"kubernetes.io/projected/37ef6884-efe8-4407-b22a-44d08a285acd-kube-api-access-mpskf\") pod \"redhat-marketplace-xqqhh\" (UID: \"37ef6884-efe8-4407-b22a-44d08a285acd\") " pod="openshift-marketplace/redhat-marketplace-xqqhh" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.760404 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37ef6884-efe8-4407-b22a-44d08a285acd-utilities\") pod \"redhat-marketplace-xqqhh\" (UID: \"37ef6884-efe8-4407-b22a-44d08a285acd\") " pod="openshift-marketplace/redhat-marketplace-xqqhh" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.861334 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpskf\" (UniqueName: \"kubernetes.io/projected/37ef6884-efe8-4407-b22a-44d08a285acd-kube-api-access-mpskf\") pod \"redhat-marketplace-xqqhh\" (UID: \"37ef6884-efe8-4407-b22a-44d08a285acd\") " pod="openshift-marketplace/redhat-marketplace-xqqhh" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.861653 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37ef6884-efe8-4407-b22a-44d08a285acd-utilities\") pod \"redhat-marketplace-xqqhh\" (UID: \"37ef6884-efe8-4407-b22a-44d08a285acd\") " pod="openshift-marketplace/redhat-marketplace-xqqhh" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.861773 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37ef6884-efe8-4407-b22a-44d08a285acd-catalog-content\") pod \"redhat-marketplace-xqqhh\" (UID: \"37ef6884-efe8-4407-b22a-44d08a285acd\") " pod="openshift-marketplace/redhat-marketplace-xqqhh" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.862974 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37ef6884-efe8-4407-b22a-44d08a285acd-catalog-content\") pod \"redhat-marketplace-xqqhh\" (UID: \"37ef6884-efe8-4407-b22a-44d08a285acd\") " pod="openshift-marketplace/redhat-marketplace-xqqhh" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.864309 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37ef6884-efe8-4407-b22a-44d08a285acd-utilities\") pod \"redhat-marketplace-xqqhh\" (UID: \"37ef6884-efe8-4407-b22a-44d08a285acd\") " pod="openshift-marketplace/redhat-marketplace-xqqhh" Dec 04 18:49:24 crc kubenswrapper[4631]: I1204 18:49:24.888216 4631 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-mpskf\" (UniqueName: \"kubernetes.io/projected/37ef6884-efe8-4407-b22a-44d08a285acd-kube-api-access-mpskf\") pod \"redhat-marketplace-xqqhh\" (UID: \"37ef6884-efe8-4407-b22a-44d08a285acd\") " pod="openshift-marketplace/redhat-marketplace-xqqhh" Dec 04 18:49:25 crc kubenswrapper[4631]: I1204 18:49:25.051355 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xqqhh" Dec 04 18:49:25 crc kubenswrapper[4631]: I1204 18:49:25.544196 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xqqhh"] Dec 04 18:49:26 crc kubenswrapper[4631]: I1204 18:49:26.416433 4631 generic.go:334] "Generic (PLEG): container finished" podID="37ef6884-efe8-4407-b22a-44d08a285acd" containerID="106b7eedfb175771d3cfe292ecaa6a1e432106270b98bee171e929b13ecee871" exitCode=0 Dec 04 18:49:26 crc kubenswrapper[4631]: I1204 18:49:26.416539 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xqqhh" event={"ID":"37ef6884-efe8-4407-b22a-44d08a285acd","Type":"ContainerDied","Data":"106b7eedfb175771d3cfe292ecaa6a1e432106270b98bee171e929b13ecee871"} Dec 04 18:49:26 crc kubenswrapper[4631]: I1204 18:49:26.416757 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xqqhh" event={"ID":"37ef6884-efe8-4407-b22a-44d08a285acd","Type":"ContainerStarted","Data":"f5c3995c297c7b82d7b7705da65bc9231591ec597d5c1081e20ef5d3de4c3855"} Dec 04 18:49:27 crc kubenswrapper[4631]: I1204 18:49:27.102615 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dqrwz"] Dec 04 18:49:27 crc kubenswrapper[4631]: I1204 18:49:27.108236 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dqrwz" Dec 04 18:49:27 crc kubenswrapper[4631]: I1204 18:49:27.112961 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dqrwz"] Dec 04 18:49:27 crc kubenswrapper[4631]: I1204 18:49:27.307259 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwrjz\" (UniqueName: \"kubernetes.io/projected/b378f30f-ad8a-41b5-98b1-1f55525c54bb-kube-api-access-wwrjz\") pod \"community-operators-dqrwz\" (UID: \"b378f30f-ad8a-41b5-98b1-1f55525c54bb\") " pod="openshift-marketplace/community-operators-dqrwz" Dec 04 18:49:27 crc kubenswrapper[4631]: I1204 18:49:27.307311 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b378f30f-ad8a-41b5-98b1-1f55525c54bb-utilities\") pod \"community-operators-dqrwz\" (UID: \"b378f30f-ad8a-41b5-98b1-1f55525c54bb\") " pod="openshift-marketplace/community-operators-dqrwz" Dec 04 18:49:27 crc kubenswrapper[4631]: I1204 18:49:27.307395 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b378f30f-ad8a-41b5-98b1-1f55525c54bb-catalog-content\") pod \"community-operators-dqrwz\" (UID: \"b378f30f-ad8a-41b5-98b1-1f55525c54bb\") " pod="openshift-marketplace/community-operators-dqrwz" Dec 04 18:49:27 crc kubenswrapper[4631]: I1204 18:49:27.409003 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwrjz\" (UniqueName: \"kubernetes.io/projected/b378f30f-ad8a-41b5-98b1-1f55525c54bb-kube-api-access-wwrjz\") pod \"community-operators-dqrwz\" (UID: \"b378f30f-ad8a-41b5-98b1-1f55525c54bb\") " pod="openshift-marketplace/community-operators-dqrwz" Dec 04 18:49:27 crc kubenswrapper[4631]: I1204 18:49:27.409066 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b378f30f-ad8a-41b5-98b1-1f55525c54bb-utilities\") pod \"community-operators-dqrwz\" (UID: \"b378f30f-ad8a-41b5-98b1-1f55525c54bb\") " pod="openshift-marketplace/community-operators-dqrwz" Dec 04 18:49:27 crc kubenswrapper[4631]: I1204 18:49:27.409118 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b378f30f-ad8a-41b5-98b1-1f55525c54bb-catalog-content\") pod \"community-operators-dqrwz\" (UID: \"b378f30f-ad8a-41b5-98b1-1f55525c54bb\") " pod="openshift-marketplace/community-operators-dqrwz" Dec 04 18:49:27 crc kubenswrapper[4631]: I1204 18:49:27.409583 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b378f30f-ad8a-41b5-98b1-1f55525c54bb-utilities\") pod \"community-operators-dqrwz\" (UID: \"b378f30f-ad8a-41b5-98b1-1f55525c54bb\") " pod="openshift-marketplace/community-operators-dqrwz" Dec 04 18:49:27 crc kubenswrapper[4631]: I1204 18:49:27.409636 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b378f30f-ad8a-41b5-98b1-1f55525c54bb-catalog-content\") pod \"community-operators-dqrwz\" (UID: \"b378f30f-ad8a-41b5-98b1-1f55525c54bb\") " pod="openshift-marketplace/community-operators-dqrwz" Dec 04 18:49:27 crc kubenswrapper[4631]: I1204 18:49:27.429664 4631 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xqqhh" event={"ID":"37ef6884-efe8-4407-b22a-44d08a285acd","Type":"ContainerStarted","Data":"34491aba629468e9ba9018b7ed558fb492c53fdba1fc053176bf6efaa01820dc"} Dec 04 18:49:27 crc kubenswrapper[4631]: I1204 18:49:27.433042 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwrjz\" (UniqueName: \"kubernetes.io/projected/b378f30f-ad8a-41b5-98b1-1f55525c54bb-kube-api-access-wwrjz\") pod \"community-operators-dqrwz\" (UID: \"b378f30f-ad8a-41b5-98b1-1f55525c54bb\") " pod="openshift-marketplace/community-operators-dqrwz" Dec 04 18:49:27 crc kubenswrapper[4631]: I1204 18:49:27.461909 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dqrwz" Dec 04 18:49:28 crc kubenswrapper[4631]: I1204 18:49:28.085333 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dqrwz"] Dec 04 18:49:28 crc kubenswrapper[4631]: I1204 18:49:28.438974 4631 generic.go:334] "Generic (PLEG): container finished" podID="b378f30f-ad8a-41b5-98b1-1f55525c54bb" containerID="6edf0490020b718ffa08eac60b2787657f74115a28168528eaf39b4bc83b94b7" exitCode=0 Dec 04 18:49:28 crc kubenswrapper[4631]: I1204 18:49:28.439091 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dqrwz" event={"ID":"b378f30f-ad8a-41b5-98b1-1f55525c54bb","Type":"ContainerDied","Data":"6edf0490020b718ffa08eac60b2787657f74115a28168528eaf39b4bc83b94b7"} Dec 04 18:49:28 crc kubenswrapper[4631]: I1204 18:49:28.439291 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dqrwz" event={"ID":"b378f30f-ad8a-41b5-98b1-1f55525c54bb","Type":"ContainerStarted","Data":"3fd4a9ace20c45d65407bfe165bcd0072b4357c7a0a6c88b921df9c2a1e6c4c1"} Dec 04 18:49:28 crc kubenswrapper[4631]: I1204 18:49:28.444716 4631 generic.go:334] "Generic (PLEG): container finished" podID="37ef6884-efe8-4407-b22a-44d08a285acd" containerID="34491aba629468e9ba9018b7ed558fb492c53fdba1fc053176bf6efaa01820dc" exitCode=0 Dec 04 18:49:28 crc kubenswrapper[4631]: I1204 18:49:28.444761 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xqqhh" event={"ID":"37ef6884-efe8-4407-b22a-44d08a285acd","Type":"ContainerDied","Data":"34491aba629468e9ba9018b7ed558fb492c53fdba1fc053176bf6efaa01820dc"} Dec 04 18:49:29 crc kubenswrapper[4631]: I1204 18:49:29.455698 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dqrwz" event={"ID":"b378f30f-ad8a-41b5-98b1-1f55525c54bb","Type":"ContainerStarted","Data":"e659f2d957428b4aebd1d011a587e818f1686fe031246aeda01c02f9fc617c3d"} Dec 04 18:49:29 crc kubenswrapper[4631]: I1204 18:49:29.460120 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xqqhh" event={"ID":"37ef6884-efe8-4407-b22a-44d08a285acd","Type":"ContainerStarted","Data":"e0195a4ef505194525e53b5e82b9eb8173a203f0523cd397062753c89a6f3178"} Dec 04 18:49:29 crc kubenswrapper[4631]: I1204 18:49:29.504228 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xqqhh" podStartSLOduration=3.053905908 podStartE2EDuration="5.50420616s" podCreationTimestamp="2025-12-04 18:49:24 +0000 UTC" firstStartedPulling="2025-12-04 18:49:26.419141116 +0000 UTC m=+4896.451383144" lastFinishedPulling="2025-12-04 
18:49:28.869441388 +0000 UTC m=+4898.901683396" observedRunningTime="2025-12-04 18:49:29.498024464 +0000 UTC m=+4899.530266492" watchObservedRunningTime="2025-12-04 18:49:29.50420616 +0000 UTC m=+4899.536448178" Dec 04 18:49:30 crc kubenswrapper[4631]: I1204 18:49:30.472292 4631 generic.go:334] "Generic (PLEG): container finished" podID="b378f30f-ad8a-41b5-98b1-1f55525c54bb" containerID="e659f2d957428b4aebd1d011a587e818f1686fe031246aeda01c02f9fc617c3d" exitCode=0 Dec 04 18:49:30 crc kubenswrapper[4631]: I1204 18:49:30.472402 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dqrwz" event={"ID":"b378f30f-ad8a-41b5-98b1-1f55525c54bb","Type":"ContainerDied","Data":"e659f2d957428b4aebd1d011a587e818f1686fe031246aeda01c02f9fc617c3d"} Dec 04 18:49:31 crc kubenswrapper[4631]: I1204 18:49:31.483074 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dqrwz" event={"ID":"b378f30f-ad8a-41b5-98b1-1f55525c54bb","Type":"ContainerStarted","Data":"43df3a2b75a065828a207b34832cce38ee721e6e44244ab79428b43c91d0f29b"} Dec 04 18:49:31 crc kubenswrapper[4631]: I1204 18:49:31.511664 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dqrwz" podStartSLOduration=2.073100794 podStartE2EDuration="4.511649681s" podCreationTimestamp="2025-12-04 18:49:27 +0000 UTC" firstStartedPulling="2025-12-04 18:49:28.440571545 +0000 UTC m=+4898.472813543" lastFinishedPulling="2025-12-04 18:49:30.879120422 +0000 UTC m=+4900.911362430" observedRunningTime="2025-12-04 18:49:31.500426501 +0000 UTC m=+4901.532668509" watchObservedRunningTime="2025-12-04 18:49:31.511649681 +0000 UTC m=+4901.543891679" Dec 04 18:49:35 crc kubenswrapper[4631]: I1204 18:49:35.051915 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xqqhh" Dec 04 18:49:35 crc kubenswrapper[4631]: I1204 18:49:35.052466 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xqqhh" Dec 04 18:49:35 crc kubenswrapper[4631]: I1204 18:49:35.113492 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xqqhh" Dec 04 18:49:35 crc kubenswrapper[4631]: I1204 18:49:35.570678 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xqqhh" Dec 04 18:49:35 crc kubenswrapper[4631]: I1204 18:49:35.878538 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xqqhh"] Dec 04 18:49:37 crc kubenswrapper[4631]: I1204 18:49:37.463247 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dqrwz" Dec 04 18:49:37 crc kubenswrapper[4631]: I1204 18:49:37.463584 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dqrwz" Dec 04 18:49:37 crc kubenswrapper[4631]: I1204 18:49:37.518764 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dqrwz" Dec 04 18:49:37 crc kubenswrapper[4631]: I1204 18:49:37.536269 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xqqhh" podUID="37ef6884-efe8-4407-b22a-44d08a285acd" containerName="registry-server" 
containerID="cri-o://e0195a4ef505194525e53b5e82b9eb8173a203f0523cd397062753c89a6f3178" gracePeriod=2 Dec 04 18:49:37 crc kubenswrapper[4631]: I1204 18:49:37.597803 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dqrwz" Dec 04 18:49:37 crc kubenswrapper[4631]: I1204 18:49:37.989658 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xqqhh" Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.119856 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37ef6884-efe8-4407-b22a-44d08a285acd-utilities\") pod \"37ef6884-efe8-4407-b22a-44d08a285acd\" (UID: \"37ef6884-efe8-4407-b22a-44d08a285acd\") " Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.119926 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37ef6884-efe8-4407-b22a-44d08a285acd-catalog-content\") pod \"37ef6884-efe8-4407-b22a-44d08a285acd\" (UID: \"37ef6884-efe8-4407-b22a-44d08a285acd\") " Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.120000 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mpskf\" (UniqueName: \"kubernetes.io/projected/37ef6884-efe8-4407-b22a-44d08a285acd-kube-api-access-mpskf\") pod \"37ef6884-efe8-4407-b22a-44d08a285acd\" (UID: \"37ef6884-efe8-4407-b22a-44d08a285acd\") " Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.120933 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37ef6884-efe8-4407-b22a-44d08a285acd-utilities" (OuterVolumeSpecName: "utilities") pod "37ef6884-efe8-4407-b22a-44d08a285acd" (UID: "37ef6884-efe8-4407-b22a-44d08a285acd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.127572 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37ef6884-efe8-4407-b22a-44d08a285acd-kube-api-access-mpskf" (OuterVolumeSpecName: "kube-api-access-mpskf") pod "37ef6884-efe8-4407-b22a-44d08a285acd" (UID: "37ef6884-efe8-4407-b22a-44d08a285acd"). InnerVolumeSpecName "kube-api-access-mpskf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.149172 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37ef6884-efe8-4407-b22a-44d08a285acd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "37ef6884-efe8-4407-b22a-44d08a285acd" (UID: "37ef6884-efe8-4407-b22a-44d08a285acd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.221981 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37ef6884-efe8-4407-b22a-44d08a285acd-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.222014 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37ef6884-efe8-4407-b22a-44d08a285acd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.222026 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mpskf\" (UniqueName: \"kubernetes.io/projected/37ef6884-efe8-4407-b22a-44d08a285acd-kube-api-access-mpskf\") on node \"crc\" DevicePath \"\"" Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.482187 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dqrwz"] Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.545434 4631 generic.go:334] "Generic (PLEG): container finished" podID="37ef6884-efe8-4407-b22a-44d08a285acd" containerID="e0195a4ef505194525e53b5e82b9eb8173a203f0523cd397062753c89a6f3178" exitCode=0 Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.545492 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xqqhh" Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.545528 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xqqhh" event={"ID":"37ef6884-efe8-4407-b22a-44d08a285acd","Type":"ContainerDied","Data":"e0195a4ef505194525e53b5e82b9eb8173a203f0523cd397062753c89a6f3178"} Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.545563 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xqqhh" event={"ID":"37ef6884-efe8-4407-b22a-44d08a285acd","Type":"ContainerDied","Data":"f5c3995c297c7b82d7b7705da65bc9231591ec597d5c1081e20ef5d3de4c3855"} Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.545580 4631 scope.go:117] "RemoveContainer" containerID="e0195a4ef505194525e53b5e82b9eb8173a203f0523cd397062753c89a6f3178" Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.569329 4631 scope.go:117] "RemoveContainer" containerID="34491aba629468e9ba9018b7ed558fb492c53fdba1fc053176bf6efaa01820dc" Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.577960 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xqqhh"] Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.588036 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xqqhh"] Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.589084 4631 scope.go:117] "RemoveContainer" containerID="106b7eedfb175771d3cfe292ecaa6a1e432106270b98bee171e929b13ecee871" Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.627343 4631 scope.go:117] "RemoveContainer" containerID="e0195a4ef505194525e53b5e82b9eb8173a203f0523cd397062753c89a6f3178" Dec 04 18:49:38 crc kubenswrapper[4631]: E1204 18:49:38.627808 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0195a4ef505194525e53b5e82b9eb8173a203f0523cd397062753c89a6f3178\": container with ID starting with e0195a4ef505194525e53b5e82b9eb8173a203f0523cd397062753c89a6f3178 not 
found: ID does not exist" containerID="e0195a4ef505194525e53b5e82b9eb8173a203f0523cd397062753c89a6f3178" Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.627844 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0195a4ef505194525e53b5e82b9eb8173a203f0523cd397062753c89a6f3178"} err="failed to get container status \"e0195a4ef505194525e53b5e82b9eb8173a203f0523cd397062753c89a6f3178\": rpc error: code = NotFound desc = could not find container \"e0195a4ef505194525e53b5e82b9eb8173a203f0523cd397062753c89a6f3178\": container with ID starting with e0195a4ef505194525e53b5e82b9eb8173a203f0523cd397062753c89a6f3178 not found: ID does not exist" Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.627866 4631 scope.go:117] "RemoveContainer" containerID="34491aba629468e9ba9018b7ed558fb492c53fdba1fc053176bf6efaa01820dc" Dec 04 18:49:38 crc kubenswrapper[4631]: E1204 18:49:38.628097 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34491aba629468e9ba9018b7ed558fb492c53fdba1fc053176bf6efaa01820dc\": container with ID starting with 34491aba629468e9ba9018b7ed558fb492c53fdba1fc053176bf6efaa01820dc not found: ID does not exist" containerID="34491aba629468e9ba9018b7ed558fb492c53fdba1fc053176bf6efaa01820dc" Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.628118 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34491aba629468e9ba9018b7ed558fb492c53fdba1fc053176bf6efaa01820dc"} err="failed to get container status \"34491aba629468e9ba9018b7ed558fb492c53fdba1fc053176bf6efaa01820dc\": rpc error: code = NotFound desc = could not find container \"34491aba629468e9ba9018b7ed558fb492c53fdba1fc053176bf6efaa01820dc\": container with ID starting with 34491aba629468e9ba9018b7ed558fb492c53fdba1fc053176bf6efaa01820dc not found: ID does not exist" Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.628130 4631 scope.go:117] "RemoveContainer" containerID="106b7eedfb175771d3cfe292ecaa6a1e432106270b98bee171e929b13ecee871" Dec 04 18:49:38 crc kubenswrapper[4631]: E1204 18:49:38.628323 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"106b7eedfb175771d3cfe292ecaa6a1e432106270b98bee171e929b13ecee871\": container with ID starting with 106b7eedfb175771d3cfe292ecaa6a1e432106270b98bee171e929b13ecee871 not found: ID does not exist" containerID="106b7eedfb175771d3cfe292ecaa6a1e432106270b98bee171e929b13ecee871" Dec 04 18:49:38 crc kubenswrapper[4631]: I1204 18:49:38.628343 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"106b7eedfb175771d3cfe292ecaa6a1e432106270b98bee171e929b13ecee871"} err="failed to get container status \"106b7eedfb175771d3cfe292ecaa6a1e432106270b98bee171e929b13ecee871\": rpc error: code = NotFound desc = could not find container \"106b7eedfb175771d3cfe292ecaa6a1e432106270b98bee171e929b13ecee871\": container with ID starting with 106b7eedfb175771d3cfe292ecaa6a1e432106270b98bee171e929b13ecee871 not found: ID does not exist" Dec 04 18:49:39 crc kubenswrapper[4631]: I1204 18:49:39.557620 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dqrwz" podUID="b378f30f-ad8a-41b5-98b1-1f55525c54bb" containerName="registry-server" containerID="cri-o://43df3a2b75a065828a207b34832cce38ee721e6e44244ab79428b43c91d0f29b" gracePeriod=2 Dec 04 
18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.253875 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37ef6884-efe8-4407-b22a-44d08a285acd" path="/var/lib/kubelet/pods/37ef6884-efe8-4407-b22a-44d08a285acd/volumes" Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.566727 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dqrwz" Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.566847 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dqrwz" event={"ID":"b378f30f-ad8a-41b5-98b1-1f55525c54bb","Type":"ContainerDied","Data":"43df3a2b75a065828a207b34832cce38ee721e6e44244ab79428b43c91d0f29b"} Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.567581 4631 scope.go:117] "RemoveContainer" containerID="43df3a2b75a065828a207b34832cce38ee721e6e44244ab79428b43c91d0f29b" Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.566810 4631 generic.go:334] "Generic (PLEG): container finished" podID="b378f30f-ad8a-41b5-98b1-1f55525c54bb" containerID="43df3a2b75a065828a207b34832cce38ee721e6e44244ab79428b43c91d0f29b" exitCode=0 Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.567634 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dqrwz" event={"ID":"b378f30f-ad8a-41b5-98b1-1f55525c54bb","Type":"ContainerDied","Data":"3fd4a9ace20c45d65407bfe165bcd0072b4357c7a0a6c88b921df9c2a1e6c4c1"} Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.595171 4631 scope.go:117] "RemoveContainer" containerID="e659f2d957428b4aebd1d011a587e818f1686fe031246aeda01c02f9fc617c3d" Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.621792 4631 scope.go:117] "RemoveContainer" containerID="6edf0490020b718ffa08eac60b2787657f74115a28168528eaf39b4bc83b94b7" Dec 04 18:49:40 crc kubenswrapper[4631]: E1204 18:49:40.635651 4631 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb378f30f_ad8a_41b5_98b1_1f55525c54bb.slice/crio-conmon-43df3a2b75a065828a207b34832cce38ee721e6e44244ab79428b43c91d0f29b.scope\": RecentStats: unable to find data in memory cache]" Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.673162 4631 scope.go:117] "RemoveContainer" containerID="43df3a2b75a065828a207b34832cce38ee721e6e44244ab79428b43c91d0f29b" Dec 04 18:49:40 crc kubenswrapper[4631]: E1204 18:49:40.675321 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43df3a2b75a065828a207b34832cce38ee721e6e44244ab79428b43c91d0f29b\": container with ID starting with 43df3a2b75a065828a207b34832cce38ee721e6e44244ab79428b43c91d0f29b not found: ID does not exist" containerID="43df3a2b75a065828a207b34832cce38ee721e6e44244ab79428b43c91d0f29b" Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.675605 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43df3a2b75a065828a207b34832cce38ee721e6e44244ab79428b43c91d0f29b"} err="failed to get container status \"43df3a2b75a065828a207b34832cce38ee721e6e44244ab79428b43c91d0f29b\": rpc error: code = NotFound desc = could not find container \"43df3a2b75a065828a207b34832cce38ee721e6e44244ab79428b43c91d0f29b\": container with ID starting with 43df3a2b75a065828a207b34832cce38ee721e6e44244ab79428b43c91d0f29b not found: ID does not exist" Dec 04 18:49:40 crc 
kubenswrapper[4631]: I1204 18:49:40.675913 4631 scope.go:117] "RemoveContainer" containerID="e659f2d957428b4aebd1d011a587e818f1686fe031246aeda01c02f9fc617c3d" Dec 04 18:49:40 crc kubenswrapper[4631]: E1204 18:49:40.676547 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e659f2d957428b4aebd1d011a587e818f1686fe031246aeda01c02f9fc617c3d\": container with ID starting with e659f2d957428b4aebd1d011a587e818f1686fe031246aeda01c02f9fc617c3d not found: ID does not exist" containerID="e659f2d957428b4aebd1d011a587e818f1686fe031246aeda01c02f9fc617c3d" Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.676580 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e659f2d957428b4aebd1d011a587e818f1686fe031246aeda01c02f9fc617c3d"} err="failed to get container status \"e659f2d957428b4aebd1d011a587e818f1686fe031246aeda01c02f9fc617c3d\": rpc error: code = NotFound desc = could not find container \"e659f2d957428b4aebd1d011a587e818f1686fe031246aeda01c02f9fc617c3d\": container with ID starting with e659f2d957428b4aebd1d011a587e818f1686fe031246aeda01c02f9fc617c3d not found: ID does not exist" Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.676621 4631 scope.go:117] "RemoveContainer" containerID="6edf0490020b718ffa08eac60b2787657f74115a28168528eaf39b4bc83b94b7" Dec 04 18:49:40 crc kubenswrapper[4631]: E1204 18:49:40.676882 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6edf0490020b718ffa08eac60b2787657f74115a28168528eaf39b4bc83b94b7\": container with ID starting with 6edf0490020b718ffa08eac60b2787657f74115a28168528eaf39b4bc83b94b7 not found: ID does not exist" containerID="6edf0490020b718ffa08eac60b2787657f74115a28168528eaf39b4bc83b94b7" Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.676940 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6edf0490020b718ffa08eac60b2787657f74115a28168528eaf39b4bc83b94b7"} err="failed to get container status \"6edf0490020b718ffa08eac60b2787657f74115a28168528eaf39b4bc83b94b7\": rpc error: code = NotFound desc = could not find container \"6edf0490020b718ffa08eac60b2787657f74115a28168528eaf39b4bc83b94b7\": container with ID starting with 6edf0490020b718ffa08eac60b2787657f74115a28168528eaf39b4bc83b94b7 not found: ID does not exist" Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.779419 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b378f30f-ad8a-41b5-98b1-1f55525c54bb-catalog-content\") pod \"b378f30f-ad8a-41b5-98b1-1f55525c54bb\" (UID: \"b378f30f-ad8a-41b5-98b1-1f55525c54bb\") " Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.779543 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwrjz\" (UniqueName: \"kubernetes.io/projected/b378f30f-ad8a-41b5-98b1-1f55525c54bb-kube-api-access-wwrjz\") pod \"b378f30f-ad8a-41b5-98b1-1f55525c54bb\" (UID: \"b378f30f-ad8a-41b5-98b1-1f55525c54bb\") " Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.779594 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b378f30f-ad8a-41b5-98b1-1f55525c54bb-utilities\") pod \"b378f30f-ad8a-41b5-98b1-1f55525c54bb\" (UID: \"b378f30f-ad8a-41b5-98b1-1f55525c54bb\") " Dec 04 18:49:40 crc 
Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.779419 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b378f30f-ad8a-41b5-98b1-1f55525c54bb-catalog-content\") pod \"b378f30f-ad8a-41b5-98b1-1f55525c54bb\" (UID: \"b378f30f-ad8a-41b5-98b1-1f55525c54bb\") "
Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.779543 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwrjz\" (UniqueName: \"kubernetes.io/projected/b378f30f-ad8a-41b5-98b1-1f55525c54bb-kube-api-access-wwrjz\") pod \"b378f30f-ad8a-41b5-98b1-1f55525c54bb\" (UID: \"b378f30f-ad8a-41b5-98b1-1f55525c54bb\") "
Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.779594 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b378f30f-ad8a-41b5-98b1-1f55525c54bb-utilities\") pod \"b378f30f-ad8a-41b5-98b1-1f55525c54bb\" (UID: \"b378f30f-ad8a-41b5-98b1-1f55525c54bb\") "
Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.780760 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b378f30f-ad8a-41b5-98b1-1f55525c54bb-utilities" (OuterVolumeSpecName: "utilities") pod "b378f30f-ad8a-41b5-98b1-1f55525c54bb" (UID: "b378f30f-ad8a-41b5-98b1-1f55525c54bb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.785140 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b378f30f-ad8a-41b5-98b1-1f55525c54bb-kube-api-access-wwrjz" (OuterVolumeSpecName: "kube-api-access-wwrjz") pod "b378f30f-ad8a-41b5-98b1-1f55525c54bb" (UID: "b378f30f-ad8a-41b5-98b1-1f55525c54bb"). InnerVolumeSpecName "kube-api-access-wwrjz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.835782 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b378f30f-ad8a-41b5-98b1-1f55525c54bb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b378f30f-ad8a-41b5-98b1-1f55525c54bb" (UID: "b378f30f-ad8a-41b5-98b1-1f55525c54bb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.881801 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b378f30f-ad8a-41b5-98b1-1f55525c54bb-utilities\") on node \"crc\" DevicePath \"\""
Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.881858 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b378f30f-ad8a-41b5-98b1-1f55525c54bb-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 04 18:49:40 crc kubenswrapper[4631]: I1204 18:49:40.881875 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwrjz\" (UniqueName: \"kubernetes.io/projected/b378f30f-ad8a-41b5-98b1-1f55525c54bb-kube-api-access-wwrjz\") on node \"crc\" DevicePath \"\""
Dec 04 18:49:41 crc kubenswrapper[4631]: I1204 18:49:41.576551 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dqrwz"
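[annotation] The UnmountVolume started -> TearDown succeeded -> Volume detached progression above is the volume manager's reconciler draining state for a deleted pod: anything still mounted but no longer desired gets torn down. A rough sketch of that reconcile shape, assuming simple in-memory maps as illustrative stand-ins for kubelet's desired/actual state-of-world caches (names are invented, not kubelet types):

package main

import "fmt"

// reconcileVolumes unmounts every volume that is still mounted but no
// longer desired, mirroring the started -> succeeded -> detached sequence
// in the log entries above.
func reconcileVolumes(desired, mounted map[string]bool, unmount func(string) error) {
	for vol := range mounted {
		if desired[vol] {
			continue // still in use by a running pod
		}
		fmt.Printf("UnmountVolume started for %q\n", vol)
		if err := unmount(vol); err != nil {
			fmt.Printf("unmount %q failed, will retry next sync: %v\n", vol, err)
			continue
		}
		delete(mounted, vol)
		fmt.Printf("Volume detached for %q\n", vol)
	}
}

func main() {
	mounted := map[string]bool{"catalog-content": true, "utilities": true, "kube-api-access-wwrjz": true}
	// Pod deleted: desired set is empty, so all three are drained.
	reconcileVolumes(map[string]bool{}, mounted, func(string) error { return nil })
}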
Dec 04 18:49:41 crc kubenswrapper[4631]: I1204 18:49:41.607031 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dqrwz"]
Dec 04 18:49:41 crc kubenswrapper[4631]: I1204 18:49:41.615358 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dqrwz"]
Dec 04 18:49:42 crc kubenswrapper[4631]: I1204 18:49:42.257584 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b378f30f-ad8a-41b5-98b1-1f55525c54bb" path="/var/lib/kubelet/pods/b378f30f-ad8a-41b5-98b1-1f55525c54bb/volumes"
Dec 04 18:50:36 crc kubenswrapper[4631]: I1204 18:50:36.023181 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 18:50:36 crc kubenswrapper[4631]: I1204 18:50:36.024062 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 18:50:37 crc kubenswrapper[4631]: I1204 18:50:37.085814 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wvsck/must-gather-5htd9"]
Dec 04 18:50:37 crc kubenswrapper[4631]: E1204 18:50:37.089463 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b378f30f-ad8a-41b5-98b1-1f55525c54bb" containerName="extract-utilities"
Dec 04 18:50:37 crc kubenswrapper[4631]: I1204 18:50:37.089488 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="b378f30f-ad8a-41b5-98b1-1f55525c54bb" containerName="extract-utilities"
Dec 04 18:50:37 crc kubenswrapper[4631]: E1204 18:50:37.089506 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b378f30f-ad8a-41b5-98b1-1f55525c54bb" containerName="extract-content"
Dec 04 18:50:37 crc kubenswrapper[4631]: I1204 18:50:37.089512 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="b378f30f-ad8a-41b5-98b1-1f55525c54bb" containerName="extract-content"
Dec 04 18:50:37 crc kubenswrapper[4631]: E1204 18:50:37.089535 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b378f30f-ad8a-41b5-98b1-1f55525c54bb" containerName="registry-server"
Dec 04 18:50:37 crc kubenswrapper[4631]: I1204 18:50:37.089541 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="b378f30f-ad8a-41b5-98b1-1f55525c54bb" containerName="registry-server"
Dec 04 18:50:37 crc kubenswrapper[4631]: E1204 18:50:37.089555 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37ef6884-efe8-4407-b22a-44d08a285acd" containerName="extract-utilities"
Dec 04 18:50:37 crc kubenswrapper[4631]: I1204 18:50:37.089560 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="37ef6884-efe8-4407-b22a-44d08a285acd" containerName="extract-utilities"
Dec 04 18:50:37 crc kubenswrapper[4631]: E1204 18:50:37.089571 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37ef6884-efe8-4407-b22a-44d08a285acd" containerName="extract-content"
Dec 04 18:50:37 crc kubenswrapper[4631]: I1204 18:50:37.089577 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="37ef6884-efe8-4407-b22a-44d08a285acd" containerName="extract-content"
Dec 04 18:50:37 crc kubenswrapper[4631]: E1204 18:50:37.089586 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37ef6884-efe8-4407-b22a-44d08a285acd" containerName="registry-server"
Dec 04 18:50:37 crc kubenswrapper[4631]: I1204 18:50:37.089592 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="37ef6884-efe8-4407-b22a-44d08a285acd" containerName="registry-server"
Dec 04 18:50:37 crc kubenswrapper[4631]: I1204 18:50:37.089812 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="b378f30f-ad8a-41b5-98b1-1f55525c54bb" containerName="registry-server"
Dec 04 18:50:37 crc kubenswrapper[4631]: I1204 18:50:37.089835 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="37ef6884-efe8-4407-b22a-44d08a285acd" containerName="registry-server"
Dec 04 18:50:37 crc kubenswrapper[4631]: I1204 18:50:37.090883 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wvsck/must-gather-5htd9"
Dec 04 18:50:37 crc kubenswrapper[4631]: I1204 18:50:37.096723 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-wvsck"/"openshift-service-ca.crt"
Dec 04 18:50:37 crc kubenswrapper[4631]: I1204 18:50:37.104153 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-wvsck"/"default-dockercfg-dkfrz"
Dec 04 18:50:37 crc kubenswrapper[4631]: I1204 18:50:37.104357 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-wvsck"/"kube-root-ca.crt"
Dec 04 18:50:37 crc kubenswrapper[4631]: I1204 18:50:37.134380 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-wvsck/must-gather-5htd9"]
Dec 04 18:50:37 crc kubenswrapper[4631]: I1204 18:50:37.151973 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/be06e218-e114-444c-9682-dd7d8a0feb28-must-gather-output\") pod \"must-gather-5htd9\" (UID: \"be06e218-e114-444c-9682-dd7d8a0feb28\") " pod="openshift-must-gather-wvsck/must-gather-5htd9"
Dec 04 18:50:37 crc kubenswrapper[4631]: I1204 18:50:37.152572 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8t2h\" (UniqueName: \"kubernetes.io/projected/be06e218-e114-444c-9682-dd7d8a0feb28-kube-api-access-d8t2h\") pod \"must-gather-5htd9\" (UID: \"be06e218-e114-444c-9682-dd7d8a0feb28\") " pod="openshift-must-gather-wvsck/must-gather-5htd9"
Dec 04 18:50:37 crc kubenswrapper[4631]: I1204 18:50:37.253928 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/be06e218-e114-444c-9682-dd7d8a0feb28-must-gather-output\") pod \"must-gather-5htd9\" (UID: \"be06e218-e114-444c-9682-dd7d8a0feb28\") " pod="openshift-must-gather-wvsck/must-gather-5htd9"
Dec 04 18:50:37 crc kubenswrapper[4631]: I1204 18:50:37.254593 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/be06e218-e114-444c-9682-dd7d8a0feb28-must-gather-output\") pod \"must-gather-5htd9\" (UID: \"be06e218-e114-444c-9682-dd7d8a0feb28\") " pod="openshift-must-gather-wvsck/must-gather-5htd9"
Dec 04 18:50:37 crc kubenswrapper[4631]: I1204 18:50:37.254341 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8t2h\" (UniqueName: \"kubernetes.io/projected/be06e218-e114-444c-9682-dd7d8a0feb28-kube-api-access-d8t2h\") pod \"must-gather-5htd9\" (UID: \"be06e218-e114-444c-9682-dd7d8a0feb28\") " pod="openshift-must-gather-wvsck/must-gather-5htd9"
Dec 04 18:50:37 crc kubenswrapper[4631]: I1204 18:50:37.277158 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8t2h\" (UniqueName: \"kubernetes.io/projected/be06e218-e114-444c-9682-dd7d8a0feb28-kube-api-access-d8t2h\") pod \"must-gather-5htd9\" (UID: \"be06e218-e114-444c-9682-dd7d8a0feb28\") " pod="openshift-must-gather-wvsck/must-gather-5htd9"
Dec 04 18:50:37 crc kubenswrapper[4631]: I1204 18:50:37.409824 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wvsck/must-gather-5htd9"
Dec 04 18:50:38 crc kubenswrapper[4631]: I1204 18:50:38.276698 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-wvsck/must-gather-5htd9"]
Dec 04 18:50:39 crc kubenswrapper[4631]: I1204 18:50:39.174943 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wvsck/must-gather-5htd9" event={"ID":"be06e218-e114-444c-9682-dd7d8a0feb28","Type":"ContainerStarted","Data":"396ea6ec20b0f113f8640333c0ace13284a2d5889e081f59f9abe179d1992864"}
Dec 04 18:50:39 crc kubenswrapper[4631]: I1204 18:50:39.175481 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wvsck/must-gather-5htd9" event={"ID":"be06e218-e114-444c-9682-dd7d8a0feb28","Type":"ContainerStarted","Data":"4a7a003e274ba374eefb0f52d41119e54bab408aa51ff86d9ee9911ad861b7a4"}
Dec 04 18:50:39 crc kubenswrapper[4631]: I1204 18:50:39.175492 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wvsck/must-gather-5htd9" event={"ID":"be06e218-e114-444c-9682-dd7d8a0feb28","Type":"ContainerStarted","Data":"c2c23f14d698f7e2bafdf211a16c0c17f8af0dc52e172683123537b984355aa0"}
Dec 04 18:50:39 crc kubenswrapper[4631]: I1204 18:50:39.194213 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wvsck/must-gather-5htd9" podStartSLOduration=2.194198871 podStartE2EDuration="2.194198871s" podCreationTimestamp="2025-12-04 18:50:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 18:50:39.191622787 +0000 UTC m=+4969.223864785" watchObservedRunningTime="2025-12-04 18:50:39.194198871 +0000 UTC m=+4969.226440869"
Dec 04 18:50:42 crc kubenswrapper[4631]: I1204 18:50:42.335643 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wvsck/crc-debug-mk8v2"]
Dec 04 18:50:42 crc kubenswrapper[4631]: I1204 18:50:42.337688 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wvsck/crc-debug-mk8v2"
Dec 04 18:50:42 crc kubenswrapper[4631]: I1204 18:50:42.464014 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/09adeb31-c2fe-47e0-a16d-5e3e6738333f-host\") pod \"crc-debug-mk8v2\" (UID: \"09adeb31-c2fe-47e0-a16d-5e3e6738333f\") " pod="openshift-must-gather-wvsck/crc-debug-mk8v2"
Dec 04 18:50:42 crc kubenswrapper[4631]: I1204 18:50:42.464391 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kk5l7\" (UniqueName: \"kubernetes.io/projected/09adeb31-c2fe-47e0-a16d-5e3e6738333f-kube-api-access-kk5l7\") pod \"crc-debug-mk8v2\" (UID: \"09adeb31-c2fe-47e0-a16d-5e3e6738333f\") " pod="openshift-must-gather-wvsck/crc-debug-mk8v2"
Dec 04 18:50:42 crc kubenswrapper[4631]: I1204 18:50:42.566749 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/09adeb31-c2fe-47e0-a16d-5e3e6738333f-host\") pod \"crc-debug-mk8v2\" (UID: \"09adeb31-c2fe-47e0-a16d-5e3e6738333f\") " pod="openshift-must-gather-wvsck/crc-debug-mk8v2"
Dec 04 18:50:42 crc kubenswrapper[4631]: I1204 18:50:42.566814 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kk5l7\" (UniqueName: \"kubernetes.io/projected/09adeb31-c2fe-47e0-a16d-5e3e6738333f-kube-api-access-kk5l7\") pod \"crc-debug-mk8v2\" (UID: \"09adeb31-c2fe-47e0-a16d-5e3e6738333f\") " pod="openshift-must-gather-wvsck/crc-debug-mk8v2"
Dec 04 18:50:42 crc kubenswrapper[4631]: I1204 18:50:42.566900 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/09adeb31-c2fe-47e0-a16d-5e3e6738333f-host\") pod \"crc-debug-mk8v2\" (UID: \"09adeb31-c2fe-47e0-a16d-5e3e6738333f\") " pod="openshift-must-gather-wvsck/crc-debug-mk8v2"
Dec 04 18:50:42 crc kubenswrapper[4631]: I1204 18:50:42.592332 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kk5l7\" (UniqueName: \"kubernetes.io/projected/09adeb31-c2fe-47e0-a16d-5e3e6738333f-kube-api-access-kk5l7\") pod \"crc-debug-mk8v2\" (UID: \"09adeb31-c2fe-47e0-a16d-5e3e6738333f\") " pod="openshift-must-gather-wvsck/crc-debug-mk8v2"
Dec 04 18:50:42 crc kubenswrapper[4631]: I1204 18:50:42.654547 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wvsck/crc-debug-mk8v2"
Dec 04 18:50:42 crc kubenswrapper[4631]: W1204 18:50:42.680687 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09adeb31_c2fe_47e0_a16d_5e3e6738333f.slice/crio-b5f23fc44c440cdc414c3d26037c3a4a0b134b36bd1426dfb7aee83bd9bf1d7e WatchSource:0}: Error finding container b5f23fc44c440cdc414c3d26037c3a4a0b134b36bd1426dfb7aee83bd9bf1d7e: Status 404 returned error can't find the container with id b5f23fc44c440cdc414c3d26037c3a4a0b134b36bd1426dfb7aee83bd9bf1d7e
Dec 04 18:50:43 crc kubenswrapper[4631]: I1204 18:50:43.223241 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wvsck/crc-debug-mk8v2" event={"ID":"09adeb31-c2fe-47e0-a16d-5e3e6738333f","Type":"ContainerStarted","Data":"cf6689bab790b2867dc649dbbe8396460a547e01bb01307fa9fafe9691990fcc"}
Dec 04 18:50:43 crc kubenswrapper[4631]: I1204 18:50:43.223889 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wvsck/crc-debug-mk8v2" event={"ID":"09adeb31-c2fe-47e0-a16d-5e3e6738333f","Type":"ContainerStarted","Data":"b5f23fc44c440cdc414c3d26037c3a4a0b134b36bd1426dfb7aee83bd9bf1d7e"}
Dec 04 18:50:43 crc kubenswrapper[4631]: I1204 18:50:43.247683 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wvsck/crc-debug-mk8v2" podStartSLOduration=1.247667321 podStartE2EDuration="1.247667321s" podCreationTimestamp="2025-12-04 18:50:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-04 18:50:43.242801072 +0000 UTC m=+4973.275043080" watchObservedRunningTime="2025-12-04 18:50:43.247667321 +0000 UTC m=+4973.279909319"
Dec 04 18:51:06 crc kubenswrapper[4631]: I1204 18:51:06.022864 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 04 18:51:06 crc kubenswrapper[4631]: I1204 18:51:06.024202 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 04 18:51:23 crc kubenswrapper[4631]: I1204 18:51:23.561817 4631 generic.go:334] "Generic (PLEG): container finished" podID="09adeb31-c2fe-47e0-a16d-5e3e6738333f" containerID="cf6689bab790b2867dc649dbbe8396460a547e01bb01307fa9fafe9691990fcc" exitCode=0
Dec 04 18:51:23 crc kubenswrapper[4631]: I1204 18:51:23.561918 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wvsck/crc-debug-mk8v2" event={"ID":"09adeb31-c2fe-47e0-a16d-5e3e6738333f","Type":"ContainerDied","Data":"cf6689bab790b2867dc649dbbe8396460a547e01bb01307fa9fafe9691990fcc"}
Dec 04 18:51:24 crc kubenswrapper[4631]: I1204 18:51:24.682782 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wvsck/crc-debug-mk8v2"
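[annotation] The recurring Liveness probe failures above are HTTP GETs against the container's declared endpoint; a transport error ("connect: connection refused", nothing listening on 127.0.0.1:8798) counts as a failed probe just like an unhealthy status code (Kubernetes treats 200-399 as success for httpGet probes). A minimal standard-library approximation of one probe attempt (the same shape as, but not, kubelet's prober code):

package main

import (
	"fmt"
	"net/http"
	"time"
)

// httpLivenessCheck performs one probe attempt: any transport error or a
// status outside 200-399 is a failure, which is how the
// probeResult="failure" lines above arise.
func httpLivenessCheck(url string, timeout time.Duration) error {
	client := &http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. dial tcp 127.0.0.1:8798: connect: connection refused
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unhealthy status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := httpLivenessCheck("http://127.0.0.1:8798/health", time.Second); err != nil {
		fmt.Println("Probe failed:", err)
	}
}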
Dec 04 18:51:24 crc kubenswrapper[4631]: I1204 18:51:24.726540 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wvsck/crc-debug-mk8v2"]
Dec 04 18:51:24 crc kubenswrapper[4631]: I1204 18:51:24.735534 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wvsck/crc-debug-mk8v2"]
Dec 04 18:51:24 crc kubenswrapper[4631]: I1204 18:51:24.773310 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kk5l7\" (UniqueName: \"kubernetes.io/projected/09adeb31-c2fe-47e0-a16d-5e3e6738333f-kube-api-access-kk5l7\") pod \"09adeb31-c2fe-47e0-a16d-5e3e6738333f\" (UID: \"09adeb31-c2fe-47e0-a16d-5e3e6738333f\") "
Dec 04 18:51:24 crc kubenswrapper[4631]: I1204 18:51:24.773829 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/09adeb31-c2fe-47e0-a16d-5e3e6738333f-host\") pod \"09adeb31-c2fe-47e0-a16d-5e3e6738333f\" (UID: \"09adeb31-c2fe-47e0-a16d-5e3e6738333f\") "
Dec 04 18:51:24 crc kubenswrapper[4631]: I1204 18:51:24.773928 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/09adeb31-c2fe-47e0-a16d-5e3e6738333f-host" (OuterVolumeSpecName: "host") pod "09adeb31-c2fe-47e0-a16d-5e3e6738333f" (UID: "09adeb31-c2fe-47e0-a16d-5e3e6738333f"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 18:51:24 crc kubenswrapper[4631]: I1204 18:51:24.774408 4631 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/09adeb31-c2fe-47e0-a16d-5e3e6738333f-host\") on node \"crc\" DevicePath \"\""
Dec 04 18:51:24 crc kubenswrapper[4631]: I1204 18:51:24.795010 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09adeb31-c2fe-47e0-a16d-5e3e6738333f-kube-api-access-kk5l7" (OuterVolumeSpecName: "kube-api-access-kk5l7") pod "09adeb31-c2fe-47e0-a16d-5e3e6738333f" (UID: "09adeb31-c2fe-47e0-a16d-5e3e6738333f"). InnerVolumeSpecName "kube-api-access-kk5l7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 04 18:51:24 crc kubenswrapper[4631]: I1204 18:51:24.875611 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kk5l7\" (UniqueName: \"kubernetes.io/projected/09adeb31-c2fe-47e0-a16d-5e3e6738333f-kube-api-access-kk5l7\") on node \"crc\" DevicePath \"\""
Dec 04 18:51:24 crc kubenswrapper[4631]: I1204 18:51:24.913957 4631 scope.go:117] "RemoveContainer" containerID="b26225c492aa13a6b78e386f4626bb1a9b46b4b390df360368829d1c6b9ea226"
Dec 04 18:51:24 crc kubenswrapper[4631]: I1204 18:51:24.936929 4631 scope.go:117] "RemoveContainer" containerID="f5bd8e29c7215e2868f1800922ec6191605040c3edb0e873956cd1edfcb7c543"
Dec 04 18:51:24 crc kubenswrapper[4631]: I1204 18:51:24.955839 4631 scope.go:117] "RemoveContainer" containerID="a88fab6e57a2ec6b989e99b8d1262b5281cb6109f69216497cf458bedc32632a"
Dec 04 18:51:25 crc kubenswrapper[4631]: I1204 18:51:25.578897 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b5f23fc44c440cdc414c3d26037c3a4a0b134b36bd1426dfb7aee83bd9bf1d7e"
Dec 04 18:51:25 crc kubenswrapper[4631]: I1204 18:51:25.578965 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wvsck/crc-debug-mk8v2"
Dec 04 18:51:25 crc kubenswrapper[4631]: I1204 18:51:25.980855 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wvsck/crc-debug-mr2d9"]
Dec 04 18:51:25 crc kubenswrapper[4631]: E1204 18:51:25.981588 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09adeb31-c2fe-47e0-a16d-5e3e6738333f" containerName="container-00"
Dec 04 18:51:25 crc kubenswrapper[4631]: I1204 18:51:25.981625 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="09adeb31-c2fe-47e0-a16d-5e3e6738333f" containerName="container-00"
Dec 04 18:51:25 crc kubenswrapper[4631]: I1204 18:51:25.981840 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="09adeb31-c2fe-47e0-a16d-5e3e6738333f" containerName="container-00"
Dec 04 18:51:25 crc kubenswrapper[4631]: I1204 18:51:25.982428 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wvsck/crc-debug-mr2d9"
Dec 04 18:51:26 crc kubenswrapper[4631]: I1204 18:51:26.099006 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b862b8b-db07-4173-a62d-89a5965df136-host\") pod \"crc-debug-mr2d9\" (UID: \"3b862b8b-db07-4173-a62d-89a5965df136\") " pod="openshift-must-gather-wvsck/crc-debug-mr2d9"
Dec 04 18:51:26 crc kubenswrapper[4631]: I1204 18:51:26.099060 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6tkw\" (UniqueName: \"kubernetes.io/projected/3b862b8b-db07-4173-a62d-89a5965df136-kube-api-access-r6tkw\") pod \"crc-debug-mr2d9\" (UID: \"3b862b8b-db07-4173-a62d-89a5965df136\") " pod="openshift-must-gather-wvsck/crc-debug-mr2d9"
Dec 04 18:51:26 crc kubenswrapper[4631]: I1204 18:51:26.200898 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b862b8b-db07-4173-a62d-89a5965df136-host\") pod \"crc-debug-mr2d9\" (UID: \"3b862b8b-db07-4173-a62d-89a5965df136\") " pod="openshift-must-gather-wvsck/crc-debug-mr2d9"
Dec 04 18:51:26 crc kubenswrapper[4631]: I1204 18:51:26.200954 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6tkw\" (UniqueName: \"kubernetes.io/projected/3b862b8b-db07-4173-a62d-89a5965df136-kube-api-access-r6tkw\") pod \"crc-debug-mr2d9\" (UID: \"3b862b8b-db07-4173-a62d-89a5965df136\") " pod="openshift-must-gather-wvsck/crc-debug-mr2d9"
Dec 04 18:51:26 crc kubenswrapper[4631]: I1204 18:51:26.201050 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b862b8b-db07-4173-a62d-89a5965df136-host\") pod \"crc-debug-mr2d9\" (UID: \"3b862b8b-db07-4173-a62d-89a5965df136\") " pod="openshift-must-gather-wvsck/crc-debug-mr2d9"
Dec 04 18:51:26 crc kubenswrapper[4631]: I1204 18:51:26.220399 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6tkw\" (UniqueName: \"kubernetes.io/projected/3b862b8b-db07-4173-a62d-89a5965df136-kube-api-access-r6tkw\") pod \"crc-debug-mr2d9\" (UID: \"3b862b8b-db07-4173-a62d-89a5965df136\") " pod="openshift-must-gather-wvsck/crc-debug-mr2d9"
Dec 04 18:51:26 crc kubenswrapper[4631]: I1204 18:51:26.249825 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09adeb31-c2fe-47e0-a16d-5e3e6738333f" path="/var/lib/kubelet/pods/09adeb31-c2fe-47e0-a16d-5e3e6738333f/volumes"
path="/var/lib/kubelet/pods/09adeb31-c2fe-47e0-a16d-5e3e6738333f/volumes" Dec 04 18:51:26 crc kubenswrapper[4631]: I1204 18:51:26.300471 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wvsck/crc-debug-mr2d9" Dec 04 18:51:26 crc kubenswrapper[4631]: W1204 18:51:26.332052 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3b862b8b_db07_4173_a62d_89a5965df136.slice/crio-ce3a5a00b7bc84342da88d1789923f9e62660531c639a53784133ea8f546d5f0 WatchSource:0}: Error finding container ce3a5a00b7bc84342da88d1789923f9e62660531c639a53784133ea8f546d5f0: Status 404 returned error can't find the container with id ce3a5a00b7bc84342da88d1789923f9e62660531c639a53784133ea8f546d5f0 Dec 04 18:51:26 crc kubenswrapper[4631]: I1204 18:51:26.594671 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wvsck/crc-debug-mr2d9" event={"ID":"3b862b8b-db07-4173-a62d-89a5965df136","Type":"ContainerStarted","Data":"ce3a5a00b7bc84342da88d1789923f9e62660531c639a53784133ea8f546d5f0"} Dec 04 18:51:27 crc kubenswrapper[4631]: I1204 18:51:27.613965 4631 generic.go:334] "Generic (PLEG): container finished" podID="3b862b8b-db07-4173-a62d-89a5965df136" containerID="d609e9439f5426ec3faf00cf289265b20daa7012e313d03c388765e6fa873e97" exitCode=0 Dec 04 18:51:27 crc kubenswrapper[4631]: I1204 18:51:27.614108 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wvsck/crc-debug-mr2d9" event={"ID":"3b862b8b-db07-4173-a62d-89a5965df136","Type":"ContainerDied","Data":"d609e9439f5426ec3faf00cf289265b20daa7012e313d03c388765e6fa873e97"} Dec 04 18:51:28 crc kubenswrapper[4631]: I1204 18:51:28.745851 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wvsck/crc-debug-mr2d9" Dec 04 18:51:28 crc kubenswrapper[4631]: I1204 18:51:28.846774 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b862b8b-db07-4173-a62d-89a5965df136-host\") pod \"3b862b8b-db07-4173-a62d-89a5965df136\" (UID: \"3b862b8b-db07-4173-a62d-89a5965df136\") " Dec 04 18:51:28 crc kubenswrapper[4631]: I1204 18:51:28.846941 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6tkw\" (UniqueName: \"kubernetes.io/projected/3b862b8b-db07-4173-a62d-89a5965df136-kube-api-access-r6tkw\") pod \"3b862b8b-db07-4173-a62d-89a5965df136\" (UID: \"3b862b8b-db07-4173-a62d-89a5965df136\") " Dec 04 18:51:28 crc kubenswrapper[4631]: I1204 18:51:28.847070 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3b862b8b-db07-4173-a62d-89a5965df136-host" (OuterVolumeSpecName: "host") pod "3b862b8b-db07-4173-a62d-89a5965df136" (UID: "3b862b8b-db07-4173-a62d-89a5965df136"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 04 18:51:28 crc kubenswrapper[4631]: I1204 18:51:28.847358 4631 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b862b8b-db07-4173-a62d-89a5965df136-host\") on node \"crc\" DevicePath \"\"" Dec 04 18:51:28 crc kubenswrapper[4631]: I1204 18:51:28.859005 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b862b8b-db07-4173-a62d-89a5965df136-kube-api-access-r6tkw" (OuterVolumeSpecName: "kube-api-access-r6tkw") pod "3b862b8b-db07-4173-a62d-89a5965df136" (UID: "3b862b8b-db07-4173-a62d-89a5965df136"). InnerVolumeSpecName "kube-api-access-r6tkw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:51:28 crc kubenswrapper[4631]: I1204 18:51:28.948640 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6tkw\" (UniqueName: \"kubernetes.io/projected/3b862b8b-db07-4173-a62d-89a5965df136-kube-api-access-r6tkw\") on node \"crc\" DevicePath \"\"" Dec 04 18:51:29 crc kubenswrapper[4631]: I1204 18:51:29.630820 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wvsck/crc-debug-mr2d9" event={"ID":"3b862b8b-db07-4173-a62d-89a5965df136","Type":"ContainerDied","Data":"ce3a5a00b7bc84342da88d1789923f9e62660531c639a53784133ea8f546d5f0"} Dec 04 18:51:29 crc kubenswrapper[4631]: I1204 18:51:29.630859 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ce3a5a00b7bc84342da88d1789923f9e62660531c639a53784133ea8f546d5f0" Dec 04 18:51:29 crc kubenswrapper[4631]: I1204 18:51:29.630885 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wvsck/crc-debug-mr2d9" Dec 04 18:51:29 crc kubenswrapper[4631]: I1204 18:51:29.847025 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wvsck/crc-debug-mr2d9"] Dec 04 18:51:29 crc kubenswrapper[4631]: I1204 18:51:29.854329 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wvsck/crc-debug-mr2d9"] Dec 04 18:51:30 crc kubenswrapper[4631]: I1204 18:51:30.250770 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b862b8b-db07-4173-a62d-89a5965df136" path="/var/lib/kubelet/pods/3b862b8b-db07-4173-a62d-89a5965df136/volumes" Dec 04 18:51:30 crc kubenswrapper[4631]: I1204 18:51:30.992853 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wvsck/crc-debug-wh2jv"] Dec 04 18:51:30 crc kubenswrapper[4631]: E1204 18:51:30.993243 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b862b8b-db07-4173-a62d-89a5965df136" containerName="container-00" Dec 04 18:51:30 crc kubenswrapper[4631]: I1204 18:51:30.993255 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b862b8b-db07-4173-a62d-89a5965df136" containerName="container-00" Dec 04 18:51:30 crc kubenswrapper[4631]: I1204 18:51:30.993457 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b862b8b-db07-4173-a62d-89a5965df136" containerName="container-00" Dec 04 18:51:30 crc kubenswrapper[4631]: I1204 18:51:30.994977 4631 util.go:30] "No sandbox for pod can be found. 
Dec 04 18:51:31 crc kubenswrapper[4631]: I1204 18:51:31.085826 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cvbl\" (UniqueName: \"kubernetes.io/projected/46cccdfa-b640-4130-846e-c4fd582c511b-kube-api-access-8cvbl\") pod \"crc-debug-wh2jv\" (UID: \"46cccdfa-b640-4130-846e-c4fd582c511b\") " pod="openshift-must-gather-wvsck/crc-debug-wh2jv"
Dec 04 18:51:31 crc kubenswrapper[4631]: I1204 18:51:31.085982 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46cccdfa-b640-4130-846e-c4fd582c511b-host\") pod \"crc-debug-wh2jv\" (UID: \"46cccdfa-b640-4130-846e-c4fd582c511b\") " pod="openshift-must-gather-wvsck/crc-debug-wh2jv"
Dec 04 18:51:31 crc kubenswrapper[4631]: I1204 18:51:31.187452 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46cccdfa-b640-4130-846e-c4fd582c511b-host\") pod \"crc-debug-wh2jv\" (UID: \"46cccdfa-b640-4130-846e-c4fd582c511b\") " pod="openshift-must-gather-wvsck/crc-debug-wh2jv"
Dec 04 18:51:31 crc kubenswrapper[4631]: I1204 18:51:31.187561 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cvbl\" (UniqueName: \"kubernetes.io/projected/46cccdfa-b640-4130-846e-c4fd582c511b-kube-api-access-8cvbl\") pod \"crc-debug-wh2jv\" (UID: \"46cccdfa-b640-4130-846e-c4fd582c511b\") " pod="openshift-must-gather-wvsck/crc-debug-wh2jv"
Dec 04 18:51:31 crc kubenswrapper[4631]: I1204 18:51:31.187617 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46cccdfa-b640-4130-846e-c4fd582c511b-host\") pod \"crc-debug-wh2jv\" (UID: \"46cccdfa-b640-4130-846e-c4fd582c511b\") " pod="openshift-must-gather-wvsck/crc-debug-wh2jv"
Dec 04 18:51:31 crc kubenswrapper[4631]: I1204 18:51:31.225699 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cvbl\" (UniqueName: \"kubernetes.io/projected/46cccdfa-b640-4130-846e-c4fd582c511b-kube-api-access-8cvbl\") pod \"crc-debug-wh2jv\" (UID: \"46cccdfa-b640-4130-846e-c4fd582c511b\") " pod="openshift-must-gather-wvsck/crc-debug-wh2jv"
Dec 04 18:51:31 crc kubenswrapper[4631]: I1204 18:51:31.329757 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wvsck/crc-debug-wh2jv"
Dec 04 18:51:31 crc kubenswrapper[4631]: I1204 18:51:31.648881 4631 generic.go:334] "Generic (PLEG): container finished" podID="46cccdfa-b640-4130-846e-c4fd582c511b" containerID="00d829a1b6b6cf5c4af1a9b1b199be0840897db34a01b13cfec4b1561bf95f03" exitCode=0
Dec 04 18:51:31 crc kubenswrapper[4631]: I1204 18:51:31.649049 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wvsck/crc-debug-wh2jv" event={"ID":"46cccdfa-b640-4130-846e-c4fd582c511b","Type":"ContainerDied","Data":"00d829a1b6b6cf5c4af1a9b1b199be0840897db34a01b13cfec4b1561bf95f03"}
Dec 04 18:51:31 crc kubenswrapper[4631]: I1204 18:51:31.649203 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wvsck/crc-debug-wh2jv" event={"ID":"46cccdfa-b640-4130-846e-c4fd582c511b","Type":"ContainerStarted","Data":"13a4a19f7e98399fca109027596cc5f18676ec66fbce24670b54b2ac190c87eb"}
Dec 04 18:51:31 crc kubenswrapper[4631]: I1204 18:51:31.687299 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wvsck/crc-debug-wh2jv"]
Dec 04 18:51:31 crc kubenswrapper[4631]: I1204 18:51:31.702399 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wvsck/crc-debug-wh2jv"]
Dec 04 18:51:32 crc kubenswrapper[4631]: I1204 18:51:32.749129 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wvsck/crc-debug-wh2jv"
Dec 04 18:51:32 crc kubenswrapper[4631]: I1204 18:51:32.821040 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8cvbl\" (UniqueName: \"kubernetes.io/projected/46cccdfa-b640-4130-846e-c4fd582c511b-kube-api-access-8cvbl\") pod \"46cccdfa-b640-4130-846e-c4fd582c511b\" (UID: \"46cccdfa-b640-4130-846e-c4fd582c511b\") "
Dec 04 18:51:32 crc kubenswrapper[4631]: I1204 18:51:32.821095 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46cccdfa-b640-4130-846e-c4fd582c511b-host\") pod \"46cccdfa-b640-4130-846e-c4fd582c511b\" (UID: \"46cccdfa-b640-4130-846e-c4fd582c511b\") "
Dec 04 18:51:32 crc kubenswrapper[4631]: I1204 18:51:32.821257 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46cccdfa-b640-4130-846e-c4fd582c511b-host" (OuterVolumeSpecName: "host") pod "46cccdfa-b640-4130-846e-c4fd582c511b" (UID: "46cccdfa-b640-4130-846e-c4fd582c511b"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 04 18:51:32 crc kubenswrapper[4631]: I1204 18:51:32.821528 4631 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46cccdfa-b640-4130-846e-c4fd582c511b-host\") on node \"crc\" DevicePath \"\""
Dec 04 18:51:32 crc kubenswrapper[4631]: I1204 18:51:32.827045 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46cccdfa-b640-4130-846e-c4fd582c511b-kube-api-access-8cvbl" (OuterVolumeSpecName: "kube-api-access-8cvbl") pod "46cccdfa-b640-4130-846e-c4fd582c511b" (UID: "46cccdfa-b640-4130-846e-c4fd582c511b"). InnerVolumeSpecName "kube-api-access-8cvbl". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:51:32 crc kubenswrapper[4631]: I1204 18:51:32.923108 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8cvbl\" (UniqueName: \"kubernetes.io/projected/46cccdfa-b640-4130-846e-c4fd582c511b-kube-api-access-8cvbl\") on node \"crc\" DevicePath \"\"" Dec 04 18:51:33 crc kubenswrapper[4631]: I1204 18:51:33.664924 4631 scope.go:117] "RemoveContainer" containerID="00d829a1b6b6cf5c4af1a9b1b199be0840897db34a01b13cfec4b1561bf95f03" Dec 04 18:51:33 crc kubenswrapper[4631]: I1204 18:51:33.665054 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wvsck/crc-debug-wh2jv" Dec 04 18:51:34 crc kubenswrapper[4631]: I1204 18:51:34.253141 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46cccdfa-b640-4130-846e-c4fd582c511b" path="/var/lib/kubelet/pods/46cccdfa-b640-4130-846e-c4fd582c511b/volumes" Dec 04 18:51:36 crc kubenswrapper[4631]: I1204 18:51:36.022841 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:51:36 crc kubenswrapper[4631]: I1204 18:51:36.023112 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 18:51:36 crc kubenswrapper[4631]: I1204 18:51:36.023153 4631 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 18:51:36 crc kubenswrapper[4631]: I1204 18:51:36.024026 4631 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149"} pod="openshift-machine-config-operator/machine-config-daemon-q27wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 18:51:36 crc kubenswrapper[4631]: I1204 18:51:36.024076 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" containerID="cri-o://05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" gracePeriod=600 Dec 04 18:51:36 crc kubenswrapper[4631]: E1204 18:51:36.162624 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:51:36 crc kubenswrapper[4631]: I1204 18:51:36.691648 4631 generic.go:334] "Generic (PLEG): container finished" podID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" exitCode=0 Dec 04 18:51:36 crc kubenswrapper[4631]: I1204 18:51:36.691704 
4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerDied","Data":"05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149"} Dec 04 18:51:36 crc kubenswrapper[4631]: I1204 18:51:36.691892 4631 scope.go:117] "RemoveContainer" containerID="05839d8d0766fa33b7eecf4a64f629e489a79c0a60120c9d4e2c2360bc76ae5f" Dec 04 18:51:36 crc kubenswrapper[4631]: I1204 18:51:36.693005 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:51:36 crc kubenswrapper[4631]: E1204 18:51:36.694055 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:51:49 crc kubenswrapper[4631]: I1204 18:51:49.240139 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:51:49 crc kubenswrapper[4631]: E1204 18:51:49.240870 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:52:00 crc kubenswrapper[4631]: I1204 18:52:00.247254 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:52:00 crc kubenswrapper[4631]: E1204 18:52:00.248307 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:52:07 crc kubenswrapper[4631]: I1204 18:52:07.132791 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6866f4d6b8-5wp55_f65b2092-9992-4e4d-be14-6ea85af840a0/barbican-api/0.log" Dec 04 18:52:07 crc kubenswrapper[4631]: I1204 18:52:07.161350 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6866f4d6b8-5wp55_f65b2092-9992-4e4d-be14-6ea85af840a0/barbican-api-log/0.log" Dec 04 18:52:07 crc kubenswrapper[4631]: I1204 18:52:07.351828 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-75cd87c688-xlr7b_b5516312-5bde-4c7d-8910-bf75f2a98812/barbican-keystone-listener/0.log" Dec 04 18:52:07 crc kubenswrapper[4631]: I1204 18:52:07.435073 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-75cd87c688-xlr7b_b5516312-5bde-4c7d-8910-bf75f2a98812/barbican-keystone-listener-log/0.log" Dec 04 18:52:07 crc kubenswrapper[4631]: I1204 18:52:07.542024 4631 log.go:25] "Finished parsing log file" 
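[annotation] The CrashLoopBackOff retries above recur with growing gaps (18:51:36, 18:51:49, 18:52:00, ...) until the restart delay saturates at the 5m0s cap quoted in the pod_workers errors. Kubernetes documents this backoff as an exponential delay starting at 10s and doubling per consecutive crash, capped at 5m; a sketch of that schedule (the constants are the documented defaults, the function itself is illustrative, not kubelet source):

package main

import (
	"fmt"
	"time"
)

// crashLoopDelay returns the documented default restart delay after the
// n-th consecutive crash: 10s, 20s, 40s, ... capped at 5m ("back-off 5m0s"
// in the errors above).
func crashLoopDelay(restarts int) time.Duration {
	const (
		base     = 10 * time.Second
		maxDelay = 5 * time.Minute
	)
	d := base
	for i := 0; i < restarts; i++ {
		d *= 2
		if d >= maxDelay {
			return maxDelay
		}
	}
	return d
}

func main() {
	for n := 0; n <= 6; n++ {
		fmt.Printf("restart %d -> wait %v\n", n, crashLoopDelay(n))
	}
}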
path="/var/log/pods/openstack_barbican-worker-f9b7c48cf-xdj7r_1de91a80-bddc-4f80-bf05-0d1aba161730/barbican-worker/0.log" Dec 04 18:52:07 crc kubenswrapper[4631]: I1204 18:52:07.646415 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-f9b7c48cf-xdj7r_1de91a80-bddc-4f80-bf05-0d1aba161730/barbican-worker-log/0.log" Dec 04 18:52:07 crc kubenswrapper[4631]: I1204 18:52:07.746746 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-c6sjn_9789c9a8-e2ff-4344-a946-81d8a8ef26fe/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:52:07 crc kubenswrapper[4631]: I1204 18:52:07.905970 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b4871a6a-2cea-402a-9dfe-e72887258bb5/ceilometer-central-agent/0.log" Dec 04 18:52:08 crc kubenswrapper[4631]: I1204 18:52:08.002170 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b4871a6a-2cea-402a-9dfe-e72887258bb5/ceilometer-notification-agent/0.log" Dec 04 18:52:08 crc kubenswrapper[4631]: I1204 18:52:08.035044 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b4871a6a-2cea-402a-9dfe-e72887258bb5/proxy-httpd/0.log" Dec 04 18:52:08 crc kubenswrapper[4631]: I1204 18:52:08.087482 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b4871a6a-2cea-402a-9dfe-e72887258bb5/sg-core/0.log" Dec 04 18:52:08 crc kubenswrapper[4631]: I1204 18:52:08.283163 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_6b9d56f1-c2cf-471c-934b-15a0497af44b/cinder-api-log/0.log" Dec 04 18:52:08 crc kubenswrapper[4631]: I1204 18:52:08.368783 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_6b9d56f1-c2cf-471c-934b-15a0497af44b/cinder-api/0.log" Dec 04 18:52:08 crc kubenswrapper[4631]: I1204 18:52:08.452679 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_f5599bd7-2ca5-4217-a0bd-785b3fb612b7/cinder-scheduler/0.log" Dec 04 18:52:08 crc kubenswrapper[4631]: I1204 18:52:08.646270 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_f5599bd7-2ca5-4217-a0bd-785b3fb612b7/probe/0.log" Dec 04 18:52:08 crc kubenswrapper[4631]: I1204 18:52:08.737338 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-bwghg_9dbef2cb-27b3-4de9-bbcc-5ef587d1ed65/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:52:08 crc kubenswrapper[4631]: I1204 18:52:08.929212 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-gtt8s_c303f9ff-2337-47a3-8e07-4ace557cc99a/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:52:09 crc kubenswrapper[4631]: I1204 18:52:09.025042 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7b659bdd7f-ndp7s_52592900-79a1-4fa6-8eb3-628f25972f5f/init/0.log" Dec 04 18:52:09 crc kubenswrapper[4631]: I1204 18:52:09.253801 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7b659bdd7f-ndp7s_52592900-79a1-4fa6-8eb3-628f25972f5f/init/0.log" Dec 04 18:52:09 crc kubenswrapper[4631]: I1204 18:52:09.339701 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7b659bdd7f-ndp7s_52592900-79a1-4fa6-8eb3-628f25972f5f/dnsmasq-dns/0.log" 
Dec 04 18:52:09 crc kubenswrapper[4631]: I1204 18:52:09.453857 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-2n9xq_8a60b6a3-2e66-46ad-987f-9c6aac93e03f/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:52:09 crc kubenswrapper[4631]: I1204 18:52:09.856544 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_7f61006f-d20d-43ed-94d6-95615925184f/glance-httpd/0.log" Dec 04 18:52:09 crc kubenswrapper[4631]: I1204 18:52:09.863162 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_7f61006f-d20d-43ed-94d6-95615925184f/glance-log/0.log" Dec 04 18:52:10 crc kubenswrapper[4631]: I1204 18:52:10.134466 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_f773d050-d51b-4753-9be0-8f5a91c674bc/glance-httpd/0.log" Dec 04 18:52:10 crc kubenswrapper[4631]: I1204 18:52:10.156244 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_f773d050-d51b-4753-9be0-8f5a91c674bc/glance-log/0.log" Dec 04 18:52:10 crc kubenswrapper[4631]: I1204 18:52:10.292829 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-77d5fd455b-8kwkp_78aafb4d-470c-477d-bfe6-5b7a29b79fc0/horizon/1.log" Dec 04 18:52:10 crc kubenswrapper[4631]: I1204 18:52:10.616571 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-77d5fd455b-8kwkp_78aafb4d-470c-477d-bfe6-5b7a29b79fc0/horizon/0.log" Dec 04 18:52:10 crc kubenswrapper[4631]: I1204 18:52:10.771097 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-cxdjv_52d13c44-1eee-4a4b-bd73-982e9d57f0d8/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:52:10 crc kubenswrapper[4631]: I1204 18:52:10.908948 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-77d5fd455b-8kwkp_78aafb4d-470c-477d-bfe6-5b7a29b79fc0/horizon-log/0.log" Dec 04 18:52:10 crc kubenswrapper[4631]: I1204 18:52:10.996446 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-5zdzl_191d10cc-aa2a-48dd-bbe5-ee2a4f63fcef/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:52:11 crc kubenswrapper[4631]: I1204 18:52:11.282169 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29414521-6wj5d_bc7ca9f0-8a6d-47c6-8e63-5a5d04a83958/keystone-cron/0.log" Dec 04 18:52:11 crc kubenswrapper[4631]: I1204 18:52:11.549385 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_83a88a9d-413f-40ce-bae5-624b4cfe00c9/kube-state-metrics/0.log" Dec 04 18:52:11 crc kubenswrapper[4631]: I1204 18:52:11.576659 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-589bf6fb8-62vft_138d8c39-c5e9-48bf-83b7-efc22bc3ec1e/keystone-api/0.log" Dec 04 18:52:11 crc kubenswrapper[4631]: I1204 18:52:11.742585 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-zh6q8_d2cf3bf3-fe05-449a-9f5c-e7e7589e11ec/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:52:12 crc kubenswrapper[4631]: I1204 18:52:12.226795 4631 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-6r6wg_5bba5c47-0692-477b-9483-f80218571763/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:52:12 crc kubenswrapper[4631]: I1204 18:52:12.240332 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:52:12 crc kubenswrapper[4631]: E1204 18:52:12.240586 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:52:12 crc kubenswrapper[4631]: I1204 18:52:12.479259 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6d6c6b7549-c7hqg_7dc0a764-9aea-494f-b71e-eb0df5cf3d66/neutron-httpd/0.log" Dec 04 18:52:12 crc kubenswrapper[4631]: I1204 18:52:12.643961 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6d6c6b7549-c7hqg_7dc0a764-9aea-494f-b71e-eb0df5cf3d66/neutron-api/0.log" Dec 04 18:52:13 crc kubenswrapper[4631]: I1204 18:52:13.739051 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_fe9c3ab8-326e-49a0-8fe3-b54c15c89051/nova-cell0-conductor-conductor/0.log" Dec 04 18:52:13 crc kubenswrapper[4631]: I1204 18:52:13.785969 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_8f18cd83-a5c6-455c-87de-2549f96b9073/nova-cell1-conductor-conductor/0.log" Dec 04 18:52:14 crc kubenswrapper[4631]: I1204 18:52:14.121044 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_b96713c5-6fba-4ee6-9111-5aedf572a172/nova-api-log/0.log" Dec 04 18:52:14 crc kubenswrapper[4631]: I1204 18:52:14.273768 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_73bece06-ddcd-4bd7-9f77-1c7551dd5c10/nova-cell1-novncproxy-novncproxy/0.log" Dec 04 18:52:14 crc kubenswrapper[4631]: I1204 18:52:14.370109 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_b96713c5-6fba-4ee6-9111-5aedf572a172/nova-api-api/0.log" Dec 04 18:52:14 crc kubenswrapper[4631]: I1204 18:52:14.468641 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-lqlcs_93cd2870-edd3-4b7f-9868-6c437dcf3164/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:52:14 crc kubenswrapper[4631]: I1204 18:52:14.616856 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_df12d5a5-6083-4b46-b6bb-8894eb4f421b/nova-metadata-log/0.log" Dec 04 18:52:15 crc kubenswrapper[4631]: I1204 18:52:15.063598 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_4c1e28f2-5820-4e06-a20b-a9062d8280be/mysql-bootstrap/0.log" Dec 04 18:52:15 crc kubenswrapper[4631]: I1204 18:52:15.330753 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_4c1e28f2-5820-4e06-a20b-a9062d8280be/mysql-bootstrap/0.log" Dec 04 18:52:15 crc kubenswrapper[4631]: I1204 18:52:15.407915 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_4c1e28f2-5820-4e06-a20b-a9062d8280be/galera/0.log" 
Dec 04 18:52:15 crc kubenswrapper[4631]: I1204 18:52:15.472064 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_77627c2f-d3c1-4699-9c42-8ab97657f312/nova-scheduler-scheduler/0.log" Dec 04 18:52:15 crc kubenswrapper[4631]: I1204 18:52:15.685043 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_0c7df533-7298-4204-aeca-992631c9ccb6/mysql-bootstrap/0.log" Dec 04 18:52:16 crc kubenswrapper[4631]: I1204 18:52:16.047057 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_0c7df533-7298-4204-aeca-992631c9ccb6/mysql-bootstrap/0.log" Dec 04 18:52:16 crc kubenswrapper[4631]: I1204 18:52:16.053116 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_0c7df533-7298-4204-aeca-992631c9ccb6/galera/0.log" Dec 04 18:52:16 crc kubenswrapper[4631]: I1204 18:52:16.350441 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_c65652e6-704f-4f88-9b9d-435868d33e0e/openstackclient/0.log" Dec 04 18:52:16 crc kubenswrapper[4631]: I1204 18:52:16.492260 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_df12d5a5-6083-4b46-b6bb-8894eb4f421b/nova-metadata-metadata/0.log" Dec 04 18:52:16 crc kubenswrapper[4631]: I1204 18:52:16.550115 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-2vnfm_10032f10-bb41-4039-a44d-ca336b45d4df/ovn-controller/0.log" Dec 04 18:52:16 crc kubenswrapper[4631]: I1204 18:52:16.925188 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-dnt84_ee500515-c2eb-4f8e-b022-1d4f1bb8106e/openstack-network-exporter/0.log" Dec 04 18:52:17 crc kubenswrapper[4631]: I1204 18:52:17.102270 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gsp75_8d34f815-7011-438b-8c8c-45363f359101/ovsdb-server-init/0.log" Dec 04 18:52:17 crc kubenswrapper[4631]: I1204 18:52:17.275606 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gsp75_8d34f815-7011-438b-8c8c-45363f359101/ovsdb-server-init/0.log" Dec 04 18:52:17 crc kubenswrapper[4631]: I1204 18:52:17.280898 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gsp75_8d34f815-7011-438b-8c8c-45363f359101/ovs-vswitchd/0.log" Dec 04 18:52:17 crc kubenswrapper[4631]: I1204 18:52:17.332111 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gsp75_8d34f815-7011-438b-8c8c-45363f359101/ovsdb-server/0.log" Dec 04 18:52:17 crc kubenswrapper[4631]: I1204 18:52:17.563394 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-qspr8_e9c5fea5-b0f9-4894-bf45-699c8b23d9f1/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:52:17 crc kubenswrapper[4631]: I1204 18:52:17.582552 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_5a597650-5fec-493b-bda8-93bb60985ae5/openstack-network-exporter/0.log" Dec 04 18:52:17 crc kubenswrapper[4631]: I1204 18:52:17.849259 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_5a597650-5fec-493b-bda8-93bb60985ae5/ovn-northd/0.log" Dec 04 18:52:17 crc kubenswrapper[4631]: I1204 18:52:17.893394 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_fe7546ca-3ffc-4d40-b075-00254781f008/openstack-network-exporter/0.log" 
Dec 04 18:52:17 crc kubenswrapper[4631]: I1204 18:52:17.945596 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_fe7546ca-3ffc-4d40-b075-00254781f008/ovsdbserver-nb/0.log" Dec 04 18:52:18 crc kubenswrapper[4631]: I1204 18:52:18.235640 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b0339f65-9966-4790-a7d2-954145c70f7b/openstack-network-exporter/0.log" Dec 04 18:52:18 crc kubenswrapper[4631]: I1204 18:52:18.266081 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b0339f65-9966-4790-a7d2-954145c70f7b/ovsdbserver-sb/0.log" Dec 04 18:52:18 crc kubenswrapper[4631]: I1204 18:52:18.600020 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1ba76133-7ea9-4b93-abdd-426b64c09c9d/setup-container/0.log" Dec 04 18:52:18 crc kubenswrapper[4631]: I1204 18:52:18.687875 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-776f95766d-5qctj_4b648789-3c38-485a-ad71-70566e8684fb/placement-api/0.log" Dec 04 18:52:18 crc kubenswrapper[4631]: I1204 18:52:18.785399 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-776f95766d-5qctj_4b648789-3c38-485a-ad71-70566e8684fb/placement-log/0.log" Dec 04 18:52:19 crc kubenswrapper[4631]: I1204 18:52:19.016793 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1ba76133-7ea9-4b93-abdd-426b64c09c9d/setup-container/0.log" Dec 04 18:52:19 crc kubenswrapper[4631]: I1204 18:52:19.056890 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1ba76133-7ea9-4b93-abdd-426b64c09c9d/rabbitmq/0.log" Dec 04 18:52:19 crc kubenswrapper[4631]: I1204 18:52:19.136852 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9ef0c479-0169-423e-9619-fbf9f7e63a97/setup-container/0.log" Dec 04 18:52:19 crc kubenswrapper[4631]: I1204 18:52:19.316425 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9ef0c479-0169-423e-9619-fbf9f7e63a97/setup-container/0.log" Dec 04 18:52:19 crc kubenswrapper[4631]: I1204 18:52:19.330134 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_9ef0c479-0169-423e-9619-fbf9f7e63a97/rabbitmq/0.log" Dec 04 18:52:19 crc kubenswrapper[4631]: I1204 18:52:19.404007 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-zltmj_c2ec68e5-0f90-46f3-b0f7-1fdc8956c306/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:52:19 crc kubenswrapper[4631]: I1204 18:52:19.597027 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-5j289_71ccfaca-2557-4840-941c-a36d55ebd0bc/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:52:19 crc kubenswrapper[4631]: I1204 18:52:19.785069 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-5fhjg_583c1d0c-fb4d-4d25-9d84-798d63586401/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:52:20 crc kubenswrapper[4631]: I1204 18:52:20.280065 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-p2h9d_c331e4d1-1da9-4a7f-bd67-f24a4c76b971/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:52:20 crc kubenswrapper[4631]: I1204 
18:52:20.424340 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-8c42n_6d2a06a0-e76d-469a-bf34-4d32dd8b0b84/ssh-known-hosts-edpm-deployment/0.log" Dec 04 18:52:20 crc kubenswrapper[4631]: I1204 18:52:20.638454 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5dd47bd8d5-qcz5l_2f9535d3-d81d-4e55-bc05-f36a8dd6b731/proxy-server/0.log" Dec 04 18:52:20 crc kubenswrapper[4631]: I1204 18:52:20.711118 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5dd47bd8d5-qcz5l_2f9535d3-d81d-4e55-bc05-f36a8dd6b731/proxy-httpd/0.log" Dec 04 18:52:20 crc kubenswrapper[4631]: I1204 18:52:20.720182 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-xjxf8_23cc29b2-48d6-42f1-a2ff-fbd418d0b47f/swift-ring-rebalance/0.log" Dec 04 18:52:20 crc kubenswrapper[4631]: I1204 18:52:20.976641 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/account-auditor/0.log" Dec 04 18:52:20 crc kubenswrapper[4631]: I1204 18:52:20.977472 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/account-reaper/0.log" Dec 04 18:52:21 crc kubenswrapper[4631]: I1204 18:52:21.126480 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/account-replicator/0.log" Dec 04 18:52:21 crc kubenswrapper[4631]: I1204 18:52:21.241628 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/container-replicator/0.log" Dec 04 18:52:21 crc kubenswrapper[4631]: I1204 18:52:21.254234 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/account-server/0.log" Dec 04 18:52:21 crc kubenswrapper[4631]: I1204 18:52:21.271042 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/container-auditor/0.log" Dec 04 18:52:21 crc kubenswrapper[4631]: I1204 18:52:21.331530 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/container-server/0.log" Dec 04 18:52:21 crc kubenswrapper[4631]: I1204 18:52:21.484050 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/container-updater/0.log" Dec 04 18:52:21 crc kubenswrapper[4631]: I1204 18:52:21.554755 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/object-auditor/0.log" Dec 04 18:52:21 crc kubenswrapper[4631]: I1204 18:52:21.576093 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/object-expirer/0.log" Dec 04 18:52:21 crc kubenswrapper[4631]: I1204 18:52:21.665505 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/object-replicator/0.log" Dec 04 18:52:21 crc kubenswrapper[4631]: I1204 18:52:21.737644 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/object-server/0.log" Dec 04 18:52:21 crc kubenswrapper[4631]: I1204 18:52:21.860760 4631 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/rsync/0.log" Dec 04 18:52:21 crc kubenswrapper[4631]: I1204 18:52:21.870307 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/object-updater/0.log" Dec 04 18:52:21 crc kubenswrapper[4631]: I1204 18:52:21.890515 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8acd1342-fa9f-43be-9c9f-28739a5aed78/swift-recon-cron/0.log" Dec 04 18:52:22 crc kubenswrapper[4631]: I1204 18:52:22.189187 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-hqdw6_15251242-87d0-444d-aa7f-f0b8936efd96/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:52:22 crc kubenswrapper[4631]: I1204 18:52:22.231554 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_36e446e6-248d-4a69-80f1-585a9bfcd4cf/tempest-tests-tempest-tests-runner/0.log" Dec 04 18:52:22 crc kubenswrapper[4631]: I1204 18:52:22.415124 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_cc75bbe8-1619-48a6-8dd8-4353f50fac82/test-operator-logs-container/0.log" Dec 04 18:52:22 crc kubenswrapper[4631]: I1204 18:52:22.506979 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-28v4d_02fda8da-e708-4897-9997-9c71901e45b7/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 04 18:52:24 crc kubenswrapper[4631]: I1204 18:52:24.245561 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:52:24 crc kubenswrapper[4631]: E1204 18:52:24.245875 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:52:35 crc kubenswrapper[4631]: I1204 18:52:35.245074 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:52:35 crc kubenswrapper[4631]: E1204 18:52:35.246049 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:52:35 crc kubenswrapper[4631]: I1204 18:52:35.489995 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_e136634f-2944-42c8-bd08-517411c92754/memcached/0.log" Dec 04 18:52:46 crc kubenswrapper[4631]: I1204 18:52:46.240018 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:52:46 crc kubenswrapper[4631]: E1204 18:52:46.240819 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting 
failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:52:52 crc kubenswrapper[4631]: I1204 18:52:52.583545 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs_ad8edd24-7550-455a-b394-343d4e2ca11b/util/0.log" Dec 04 18:52:52 crc kubenswrapper[4631]: I1204 18:52:52.722832 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs_ad8edd24-7550-455a-b394-343d4e2ca11b/util/0.log" Dec 04 18:52:52 crc kubenswrapper[4631]: I1204 18:52:52.758631 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs_ad8edd24-7550-455a-b394-343d4e2ca11b/pull/0.log" Dec 04 18:52:52 crc kubenswrapper[4631]: I1204 18:52:52.793793 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs_ad8edd24-7550-455a-b394-343d4e2ca11b/pull/0.log" Dec 04 18:52:52 crc kubenswrapper[4631]: I1204 18:52:52.962540 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs_ad8edd24-7550-455a-b394-343d4e2ca11b/pull/0.log" Dec 04 18:52:53 crc kubenswrapper[4631]: I1204 18:52:53.017751 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs_ad8edd24-7550-455a-b394-343d4e2ca11b/extract/0.log" Dec 04 18:52:53 crc kubenswrapper[4631]: I1204 18:52:53.070984 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_69e8e537e04f4de464a062ccb6541f4e07f967b09dc2ef87a0d14bac4a54vvs_ad8edd24-7550-455a-b394-343d4e2ca11b/util/0.log" Dec 04 18:52:53 crc kubenswrapper[4631]: I1204 18:52:53.200582 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-67cns_30be0340-cc50-4244-9b27-7e41f86bf113/kube-rbac-proxy/0.log" Dec 04 18:52:53 crc kubenswrapper[4631]: I1204 18:52:53.315986 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-67cns_30be0340-cc50-4244-9b27-7e41f86bf113/manager/0.log" Dec 04 18:52:53 crc kubenswrapper[4631]: I1204 18:52:53.454406 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-9s4jp_709a39e5-9fe0-4861-8761-774f26a4a315/kube-rbac-proxy/0.log" Dec 04 18:52:53 crc kubenswrapper[4631]: I1204 18:52:53.629775 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-9s4jp_709a39e5-9fe0-4861-8761-774f26a4a315/manager/0.log" Dec 04 18:52:53 crc kubenswrapper[4631]: I1204 18:52:53.809671 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-p6vp2_dd2fd0ee-2bee-4cd2-9c24-a0c0dce37b46/kube-rbac-proxy/0.log" Dec 04 18:52:53 crc kubenswrapper[4631]: I1204 18:52:53.900666 4631 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-p6vp2_dd2fd0ee-2bee-4cd2-9c24-a0c0dce37b46/manager/0.log" Dec 04 18:52:54 crc kubenswrapper[4631]: I1204 18:52:54.144702 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-ldclc_32886d79-72a7-4318-8098-718f0f55f61e/manager/0.log" Dec 04 18:52:54 crc kubenswrapper[4631]: I1204 18:52:54.166570 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-ldclc_32886d79-72a7-4318-8098-718f0f55f61e/kube-rbac-proxy/0.log" Dec 04 18:52:54 crc kubenswrapper[4631]: I1204 18:52:54.277036 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-jwf42_1d24a40a-06b2-43e4-9921-05dd2e8f27ea/kube-rbac-proxy/0.log" Dec 04 18:52:54 crc kubenswrapper[4631]: I1204 18:52:54.486361 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-jwf42_1d24a40a-06b2-43e4-9921-05dd2e8f27ea/manager/0.log" Dec 04 18:52:54 crc kubenswrapper[4631]: I1204 18:52:54.528201 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-cz8hk_5a715ee8-c048-4447-b3fc-5f94121c0e7e/kube-rbac-proxy/0.log" Dec 04 18:52:54 crc kubenswrapper[4631]: I1204 18:52:54.617868 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-cz8hk_5a715ee8-c048-4447-b3fc-5f94121c0e7e/manager/0.log" Dec 04 18:52:55 crc kubenswrapper[4631]: I1204 18:52:55.038890 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-57k7z_fdf92431-a279-4eb5-8e5d-56e353febcf2/kube-rbac-proxy/0.log" Dec 04 18:52:55 crc kubenswrapper[4631]: I1204 18:52:55.240308 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-2bzwd_b8212ac4-255e-4de2-ac13-0033682d7550/kube-rbac-proxy/0.log" Dec 04 18:52:55 crc kubenswrapper[4631]: I1204 18:52:55.244007 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-57k7z_fdf92431-a279-4eb5-8e5d-56e353febcf2/manager/0.log" Dec 04 18:52:55 crc kubenswrapper[4631]: I1204 18:52:55.353688 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-2bzwd_b8212ac4-255e-4de2-ac13-0033682d7550/manager/0.log" Dec 04 18:52:55 crc kubenswrapper[4631]: I1204 18:52:55.522428 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-xdm6x_f0089345-8234-4ea7-9fbe-528afe9d5fc0/kube-rbac-proxy/0.log" Dec 04 18:52:55 crc kubenswrapper[4631]: I1204 18:52:55.558177 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-xdm6x_f0089345-8234-4ea7-9fbe-528afe9d5fc0/manager/0.log" Dec 04 18:52:55 crc kubenswrapper[4631]: I1204 18:52:55.644760 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-sw9bk_72f35a0d-fa67-44c7-a25c-b720885d5708/kube-rbac-proxy/0.log" Dec 04 18:52:55 crc kubenswrapper[4631]: I1204 18:52:55.774319 4631 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-sw9bk_72f35a0d-fa67-44c7-a25c-b720885d5708/manager/0.log" Dec 04 18:52:55 crc kubenswrapper[4631]: I1204 18:52:55.937169 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-tmr9d_76c2990b-dff1-4715-8517-28cff884cf12/kube-rbac-proxy/0.log" Dec 04 18:52:55 crc kubenswrapper[4631]: I1204 18:52:55.952241 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-tmr9d_76c2990b-dff1-4715-8517-28cff884cf12/manager/0.log" Dec 04 18:52:56 crc kubenswrapper[4631]: I1204 18:52:56.088259 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-2hbp2_41a5b9af-e0eb-46d8-84f0-0962dd72367c/kube-rbac-proxy/0.log" Dec 04 18:52:56 crc kubenswrapper[4631]: I1204 18:52:56.178584 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-2hbp2_41a5b9af-e0eb-46d8-84f0-0962dd72367c/manager/0.log" Dec 04 18:52:56 crc kubenswrapper[4631]: I1204 18:52:56.242213 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-czzvh_22b6958b-a18a-49c1-b6a4-28b3ebad0846/kube-rbac-proxy/0.log" Dec 04 18:52:56 crc kubenswrapper[4631]: I1204 18:52:56.419727 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-czzvh_22b6958b-a18a-49c1-b6a4-28b3ebad0846/manager/0.log" Dec 04 18:52:56 crc kubenswrapper[4631]: I1204 18:52:56.461168 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-66x65_becd7035-989e-497f-96ad-7eaa0d7e4456/kube-rbac-proxy/0.log" Dec 04 18:52:56 crc kubenswrapper[4631]: I1204 18:52:56.532384 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-66x65_becd7035-989e-497f-96ad-7eaa0d7e4456/manager/0.log" Dec 04 18:52:56 crc kubenswrapper[4631]: I1204 18:52:56.672409 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq_e20ca639-4732-4b27-b2e2-8d4cc9374515/kube-rbac-proxy/0.log" Dec 04 18:52:56 crc kubenswrapper[4631]: I1204 18:52:56.712218 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4cmprq_e20ca639-4732-4b27-b2e2-8d4cc9374515/manager/0.log" Dec 04 18:52:57 crc kubenswrapper[4631]: I1204 18:52:57.045242 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-66bcc8f984-lzslx_75ee8627-c453-43a3-a933-080907b850cc/operator/0.log" Dec 04 18:52:57 crc kubenswrapper[4631]: I1204 18:52:57.321946 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-c9xfp_f1c2db29-609d-4d06-bf5a-702536504419/registry-server/0.log" Dec 04 18:52:57 crc kubenswrapper[4631]: I1204 18:52:57.487508 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-xc5m4_1c44bc20-c171-4476-a959-9e31d9bbac58/kube-rbac-proxy/0.log" Dec 04 18:52:57 crc kubenswrapper[4631]: I1204 18:52:57.672560 4631 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-xc5m4_1c44bc20-c171-4476-a959-9e31d9bbac58/manager/0.log" Dec 04 18:52:57 crc kubenswrapper[4631]: I1204 18:52:57.793809 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-df7fm_7e9e43c6-516b-4195-9d65-e6e80544bb7d/kube-rbac-proxy/0.log" Dec 04 18:52:57 crc kubenswrapper[4631]: I1204 18:52:57.818219 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-df7fm_7e9e43c6-516b-4195-9d65-e6e80544bb7d/manager/0.log" Dec 04 18:52:58 crc kubenswrapper[4631]: I1204 18:52:58.012596 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-f65bcfbd6-zphvh_8eceb916-5479-43f0-a3f4-75d0643adcab/manager/0.log" Dec 04 18:52:58 crc kubenswrapper[4631]: I1204 18:52:58.242038 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:52:58 crc kubenswrapper[4631]: E1204 18:52:58.242260 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:52:58 crc kubenswrapper[4631]: I1204 18:52:58.345755 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-ct9z5_8c4e7e86-5efa-4888-a717-2dcafc489144/operator/0.log" Dec 04 18:52:58 crc kubenswrapper[4631]: I1204 18:52:58.391764 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-grlw8_acce4f1e-311d-44da-aaf9-a2cddc75be35/kube-rbac-proxy/0.log" Dec 04 18:52:58 crc kubenswrapper[4631]: I1204 18:52:58.471681 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-grlw8_acce4f1e-311d-44da-aaf9-a2cddc75be35/manager/0.log" Dec 04 18:52:58 crc kubenswrapper[4631]: I1204 18:52:58.606992 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-f5lq4_f05ded75-e10d-41ed-921d-0ba118f3453d/manager/0.log" Dec 04 18:52:58 crc kubenswrapper[4631]: I1204 18:52:58.635778 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-f5lq4_f05ded75-e10d-41ed-921d-0ba118f3453d/kube-rbac-proxy/0.log" Dec 04 18:52:58 crc kubenswrapper[4631]: I1204 18:52:58.745746 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-r28bs_a19a11a8-a149-4b75-ab68-359723dcfbcb/kube-rbac-proxy/0.log" Dec 04 18:52:58 crc kubenswrapper[4631]: I1204 18:52:58.826466 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-r28bs_a19a11a8-a149-4b75-ab68-359723dcfbcb/manager/0.log" Dec 04 18:52:58 crc kubenswrapper[4631]: I1204 18:52:58.922821 4631 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-bqs5m_7cf50b74-b958-4f66-aefc-2ad897abdec2/kube-rbac-proxy/0.log" Dec 04 18:52:58 crc kubenswrapper[4631]: I1204 18:52:58.964026 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-bqs5m_7cf50b74-b958-4f66-aefc-2ad897abdec2/manager/0.log" Dec 04 18:53:12 crc kubenswrapper[4631]: I1204 18:53:12.239180 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:53:12 crc kubenswrapper[4631]: E1204 18:53:12.240004 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:53:20 crc kubenswrapper[4631]: I1204 18:53:20.697198 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-gswpr_0f92bdbc-4785-44bf-a91c-88fe53b02d2a/control-plane-machine-set-operator/0.log" Dec 04 18:53:20 crc kubenswrapper[4631]: I1204 18:53:20.767498 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-pdgsq_9a38e196-88e0-4add-8e52-40b1d8eb79e9/kube-rbac-proxy/0.log" Dec 04 18:53:20 crc kubenswrapper[4631]: I1204 18:53:20.850930 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-pdgsq_9a38e196-88e0-4add-8e52-40b1d8eb79e9/machine-api-operator/0.log" Dec 04 18:53:26 crc kubenswrapper[4631]: I1204 18:53:26.239338 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:53:26 crc kubenswrapper[4631]: E1204 18:53:26.240122 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:53:34 crc kubenswrapper[4631]: I1204 18:53:34.427242 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-6djk2_ea64b959-aecd-46e1-b2a4-cde17cc753d8/cert-manager-controller/0.log" Dec 04 18:53:34 crc kubenswrapper[4631]: I1204 18:53:34.567781 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-5vb4n_5a3b7e1e-41ff-4029-b2cd-6dc6be40ae3d/cert-manager-cainjector/0.log" Dec 04 18:53:34 crc kubenswrapper[4631]: I1204 18:53:34.607865 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-tffkj_d558fa33-5875-4eb1-80ec-2f5726659b7e/cert-manager-webhook/0.log" Dec 04 18:53:38 crc kubenswrapper[4631]: I1204 18:53:38.240098 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:53:38 crc kubenswrapper[4631]: E1204 18:53:38.241171 4631 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:53:47 crc kubenswrapper[4631]: I1204 18:53:47.485211 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-5cbf9_1bfeccd7-32aa-4315-96df-4d7df3f10767/nmstate-console-plugin/0.log" Dec 04 18:53:47 crc kubenswrapper[4631]: I1204 18:53:47.572179 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-dqwmp_a6bb09d3-0c21-4ed2-9c13-cd9116b8f9f9/nmstate-handler/0.log" Dec 04 18:53:47 crc kubenswrapper[4631]: I1204 18:53:47.671258 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-8pp22_061c2acd-2d43-420a-8c0f-d31fcd0b2d3e/nmstate-metrics/0.log" Dec 04 18:53:47 crc kubenswrapper[4631]: I1204 18:53:47.719708 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-8pp22_061c2acd-2d43-420a-8c0f-d31fcd0b2d3e/kube-rbac-proxy/0.log" Dec 04 18:53:47 crc kubenswrapper[4631]: I1204 18:53:47.934992 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-2b87p_f095f34f-aa8e-4f97-a34d-63fbc8722163/nmstate-operator/0.log" Dec 04 18:53:48 crc kubenswrapper[4631]: I1204 18:53:48.002564 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-kd9nq_6af82021-e54a-415b-963a-3e0ca6f7fd5c/nmstate-webhook/0.log" Dec 04 18:53:50 crc kubenswrapper[4631]: I1204 18:53:50.280037 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:53:50 crc kubenswrapper[4631]: E1204 18:53:50.280818 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:54:05 crc kubenswrapper[4631]: I1204 18:54:05.239192 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:54:05 crc kubenswrapper[4631]: E1204 18:54:05.239959 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:54:05 crc kubenswrapper[4631]: I1204 18:54:05.321660 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-cpxkk_7531a7c8-09d0-470e-b530-227bff4a6659/kube-rbac-proxy/0.log" Dec 04 18:54:05 crc kubenswrapper[4631]: I1204 18:54:05.333525 4631 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_controller-f8648f98b-cpxkk_7531a7c8-09d0-470e-b530-227bff4a6659/controller/0.log" Dec 04 18:54:05 crc kubenswrapper[4631]: I1204 18:54:05.638647 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-frr-files/0.log" Dec 04 18:54:05 crc kubenswrapper[4631]: I1204 18:54:05.850652 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-metrics/0.log" Dec 04 18:54:05 crc kubenswrapper[4631]: I1204 18:54:05.948622 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-reloader/0.log" Dec 04 18:54:05 crc kubenswrapper[4631]: I1204 18:54:05.963285 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-reloader/0.log" Dec 04 18:54:05 crc kubenswrapper[4631]: I1204 18:54:05.989433 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-frr-files/0.log" Dec 04 18:54:06 crc kubenswrapper[4631]: I1204 18:54:06.168820 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-frr-files/0.log" Dec 04 18:54:06 crc kubenswrapper[4631]: I1204 18:54:06.202788 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-reloader/0.log" Dec 04 18:54:06 crc kubenswrapper[4631]: I1204 18:54:06.212945 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-metrics/0.log" Dec 04 18:54:06 crc kubenswrapper[4631]: I1204 18:54:06.253274 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-metrics/0.log" Dec 04 18:54:06 crc kubenswrapper[4631]: I1204 18:54:06.456747 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-reloader/0.log" Dec 04 18:54:06 crc kubenswrapper[4631]: I1204 18:54:06.468903 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-frr-files/0.log" Dec 04 18:54:06 crc kubenswrapper[4631]: I1204 18:54:06.471615 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/cp-metrics/0.log" Dec 04 18:54:06 crc kubenswrapper[4631]: I1204 18:54:06.540542 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/controller/0.log" Dec 04 18:54:06 crc kubenswrapper[4631]: I1204 18:54:06.707051 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/kube-rbac-proxy/0.log" Dec 04 18:54:06 crc kubenswrapper[4631]: I1204 18:54:06.755567 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/frr-metrics/0.log" Dec 04 18:54:06 crc kubenswrapper[4631]: I1204 18:54:06.874753 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/kube-rbac-proxy-frr/0.log" Dec 04 18:54:07 crc 
kubenswrapper[4631]: I1204 18:54:07.002286 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/reloader/0.log" Dec 04 18:54:07 crc kubenswrapper[4631]: I1204 18:54:07.110878 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-rpshr_78c76818-4dc3-4a33-b105-f8194a1cde60/frr-k8s-webhook-server/0.log" Dec 04 18:54:07 crc kubenswrapper[4631]: I1204 18:54:07.413851 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-9d6f9bbbc-h6txj_7628937e-69d4-416b-bf62-0b8cb083c4b1/manager/0.log" Dec 04 18:54:07 crc kubenswrapper[4631]: I1204 18:54:07.609695 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6dfc44c866-psc9v_b471a575-aeaa-473d-a180-161a7c07d2af/webhook-server/0.log" Dec 04 18:54:07 crc kubenswrapper[4631]: I1204 18:54:07.762386 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-45ph8_025cf6e6-5d36-4973-bac3-7cd1046ddeea/kube-rbac-proxy/0.log" Dec 04 18:54:08 crc kubenswrapper[4631]: I1204 18:54:08.047976 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-d7bk6_ac0998d1-a266-4aeb-9af6-a18659dea142/frr/0.log" Dec 04 18:54:08 crc kubenswrapper[4631]: I1204 18:54:08.241303 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-45ph8_025cf6e6-5d36-4973-bac3-7cd1046ddeea/speaker/0.log" Dec 04 18:54:20 crc kubenswrapper[4631]: I1204 18:54:20.245090 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:54:20 crc kubenswrapper[4631]: E1204 18:54:20.248070 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:54:20 crc kubenswrapper[4631]: I1204 18:54:20.799952 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84_12f1788b-0cad-4272-9208-6ed4bd4d2ac0/util/0.log" Dec 04 18:54:21 crc kubenswrapper[4631]: I1204 18:54:21.032879 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84_12f1788b-0cad-4272-9208-6ed4bd4d2ac0/util/0.log" Dec 04 18:54:21 crc kubenswrapper[4631]: I1204 18:54:21.053868 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84_12f1788b-0cad-4272-9208-6ed4bd4d2ac0/pull/0.log" Dec 04 18:54:21 crc kubenswrapper[4631]: I1204 18:54:21.103774 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84_12f1788b-0cad-4272-9208-6ed4bd4d2ac0/pull/0.log" Dec 04 18:54:21 crc kubenswrapper[4631]: I1204 18:54:21.316492 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84_12f1788b-0cad-4272-9208-6ed4bd4d2ac0/util/0.log" Dec 04 
18:54:21 crc kubenswrapper[4631]: I1204 18:54:21.321625 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84_12f1788b-0cad-4272-9208-6ed4bd4d2ac0/extract/0.log" Dec 04 18:54:21 crc kubenswrapper[4631]: I1204 18:54:21.322218 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f59f84_12f1788b-0cad-4272-9208-6ed4bd4d2ac0/pull/0.log" Dec 04 18:54:21 crc kubenswrapper[4631]: I1204 18:54:21.500397 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28_104d954a-30c9-401b-8e56-817777e91f38/util/0.log" Dec 04 18:54:21 crc kubenswrapper[4631]: I1204 18:54:21.681373 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28_104d954a-30c9-401b-8e56-817777e91f38/util/0.log" Dec 04 18:54:21 crc kubenswrapper[4631]: I1204 18:54:21.683348 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28_104d954a-30c9-401b-8e56-817777e91f38/pull/0.log" Dec 04 18:54:21 crc kubenswrapper[4631]: I1204 18:54:21.731266 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28_104d954a-30c9-401b-8e56-817777e91f38/pull/0.log" Dec 04 18:54:21 crc kubenswrapper[4631]: I1204 18:54:21.956086 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28_104d954a-30c9-401b-8e56-817777e91f38/extract/0.log" Dec 04 18:54:21 crc kubenswrapper[4631]: I1204 18:54:21.970494 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28_104d954a-30c9-401b-8e56-817777e91f38/pull/0.log" Dec 04 18:54:22 crc kubenswrapper[4631]: I1204 18:54:22.171738 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pxbcf_2cba6dd8-e8de-492c-b25f-f7092a13c72f/extract-utilities/0.log" Dec 04 18:54:22 crc kubenswrapper[4631]: I1204 18:54:22.228769 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83pqk28_104d954a-30c9-401b-8e56-817777e91f38/util/0.log" Dec 04 18:54:22 crc kubenswrapper[4631]: I1204 18:54:22.397348 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pxbcf_2cba6dd8-e8de-492c-b25f-f7092a13c72f/extract-utilities/0.log" Dec 04 18:54:22 crc kubenswrapper[4631]: I1204 18:54:22.410031 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pxbcf_2cba6dd8-e8de-492c-b25f-f7092a13c72f/extract-content/0.log" Dec 04 18:54:22 crc kubenswrapper[4631]: I1204 18:54:22.463971 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pxbcf_2cba6dd8-e8de-492c-b25f-f7092a13c72f/extract-content/0.log" Dec 04 18:54:22 crc kubenswrapper[4631]: I1204 18:54:22.658854 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pxbcf_2cba6dd8-e8de-492c-b25f-f7092a13c72f/extract-utilities/0.log" Dec 04 18:54:22 
crc kubenswrapper[4631]: I1204 18:54:22.709316 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pxbcf_2cba6dd8-e8de-492c-b25f-f7092a13c72f/extract-content/0.log" Dec 04 18:54:22 crc kubenswrapper[4631]: I1204 18:54:22.975343 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-db4qf_5267c3bf-4068-4178-9e3e-9a24e1c11a5e/extract-utilities/0.log" Dec 04 18:54:23 crc kubenswrapper[4631]: I1204 18:54:23.090395 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pxbcf_2cba6dd8-e8de-492c-b25f-f7092a13c72f/registry-server/0.log" Dec 04 18:54:23 crc kubenswrapper[4631]: I1204 18:54:23.186935 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-db4qf_5267c3bf-4068-4178-9e3e-9a24e1c11a5e/extract-utilities/0.log" Dec 04 18:54:23 crc kubenswrapper[4631]: I1204 18:54:23.224863 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-db4qf_5267c3bf-4068-4178-9e3e-9a24e1c11a5e/extract-content/0.log" Dec 04 18:54:23 crc kubenswrapper[4631]: I1204 18:54:23.229754 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-db4qf_5267c3bf-4068-4178-9e3e-9a24e1c11a5e/extract-content/0.log" Dec 04 18:54:23 crc kubenswrapper[4631]: I1204 18:54:23.508672 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-db4qf_5267c3bf-4068-4178-9e3e-9a24e1c11a5e/extract-content/0.log" Dec 04 18:54:23 crc kubenswrapper[4631]: I1204 18:54:23.510037 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-db4qf_5267c3bf-4068-4178-9e3e-9a24e1c11a5e/extract-utilities/0.log" Dec 04 18:54:23 crc kubenswrapper[4631]: I1204 18:54:23.804596 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-lpl45_68b9b122-03d1-41c7-8910-62826c1eedbb/marketplace-operator/0.log" Dec 04 18:54:23 crc kubenswrapper[4631]: I1204 18:54:23.874664 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tzprv_575a3ee8-f538-4c96-8067-564def2cc3ff/extract-utilities/0.log" Dec 04 18:54:24 crc kubenswrapper[4631]: I1204 18:54:24.163747 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tzprv_575a3ee8-f538-4c96-8067-564def2cc3ff/extract-content/0.log" Dec 04 18:54:24 crc kubenswrapper[4631]: I1204 18:54:24.304527 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tzprv_575a3ee8-f538-4c96-8067-564def2cc3ff/extract-content/0.log" Dec 04 18:54:24 crc kubenswrapper[4631]: I1204 18:54:24.307927 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-db4qf_5267c3bf-4068-4178-9e3e-9a24e1c11a5e/registry-server/0.log" Dec 04 18:54:24 crc kubenswrapper[4631]: I1204 18:54:24.324529 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tzprv_575a3ee8-f538-4c96-8067-564def2cc3ff/extract-utilities/0.log" Dec 04 18:54:24 crc kubenswrapper[4631]: I1204 18:54:24.489459 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tzprv_575a3ee8-f538-4c96-8067-564def2cc3ff/extract-content/0.log" Dec 04 18:54:24 crc 
kubenswrapper[4631]: I1204 18:54:24.495632 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tzprv_575a3ee8-f538-4c96-8067-564def2cc3ff/extract-utilities/0.log" Dec 04 18:54:24 crc kubenswrapper[4631]: I1204 18:54:24.730055 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cfh7w_e28d58eb-8c1a-4e76-87fc-aefb35295f30/extract-utilities/0.log" Dec 04 18:54:24 crc kubenswrapper[4631]: I1204 18:54:24.761096 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-tzprv_575a3ee8-f538-4c96-8067-564def2cc3ff/registry-server/0.log" Dec 04 18:54:24 crc kubenswrapper[4631]: I1204 18:54:24.944768 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cfh7w_e28d58eb-8c1a-4e76-87fc-aefb35295f30/extract-utilities/0.log" Dec 04 18:54:25 crc kubenswrapper[4631]: I1204 18:54:25.156421 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cfh7w_e28d58eb-8c1a-4e76-87fc-aefb35295f30/extract-content/0.log" Dec 04 18:54:25 crc kubenswrapper[4631]: I1204 18:54:25.180484 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cfh7w_e28d58eb-8c1a-4e76-87fc-aefb35295f30/extract-content/0.log" Dec 04 18:54:25 crc kubenswrapper[4631]: I1204 18:54:25.442913 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cfh7w_e28d58eb-8c1a-4e76-87fc-aefb35295f30/extract-content/0.log" Dec 04 18:54:25 crc kubenswrapper[4631]: I1204 18:54:25.466916 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cfh7w_e28d58eb-8c1a-4e76-87fc-aefb35295f30/extract-utilities/0.log" Dec 04 18:54:26 crc kubenswrapper[4631]: I1204 18:54:26.087522 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-cfh7w_e28d58eb-8c1a-4e76-87fc-aefb35295f30/registry-server/0.log" Dec 04 18:54:33 crc kubenswrapper[4631]: I1204 18:54:33.239595 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:54:33 crc kubenswrapper[4631]: E1204 18:54:33.240558 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:54:48 crc kubenswrapper[4631]: I1204 18:54:48.244217 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:54:48 crc kubenswrapper[4631]: E1204 18:54:48.244913 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:54:52 crc kubenswrapper[4631]: E1204 18:54:52.641036 4631 upgradeaware.go:441] Error proxying data from backend to 
client: writeto tcp 38.102.83.194:37252->38.102.83.194:39691: read tcp 38.102.83.194:37252->38.102.83.194:39691: read: connection reset by peer Dec 04 18:55:02 crc kubenswrapper[4631]: I1204 18:55:02.239340 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:55:02 crc kubenswrapper[4631]: E1204 18:55:02.240093 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:55:16 crc kubenswrapper[4631]: I1204 18:55:16.240161 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:55:16 crc kubenswrapper[4631]: E1204 18:55:16.241072 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:55:30 crc kubenswrapper[4631]: I1204 18:55:30.254420 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:55:30 crc kubenswrapper[4631]: E1204 18:55:30.255216 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:55:33 crc kubenswrapper[4631]: I1204 18:55:33.239685 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-q7w4q"] Dec 04 18:55:33 crc kubenswrapper[4631]: E1204 18:55:33.240306 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46cccdfa-b640-4130-846e-c4fd582c511b" containerName="container-00" Dec 04 18:55:33 crc kubenswrapper[4631]: I1204 18:55:33.240317 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="46cccdfa-b640-4130-846e-c4fd582c511b" containerName="container-00" Dec 04 18:55:33 crc kubenswrapper[4631]: I1204 18:55:33.240550 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="46cccdfa-b640-4130-846e-c4fd582c511b" containerName="container-00" Dec 04 18:55:33 crc kubenswrapper[4631]: I1204 18:55:33.241846 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-q7w4q" Dec 04 18:55:33 crc kubenswrapper[4631]: I1204 18:55:33.263052 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q7w4q"] Dec 04 18:55:33 crc kubenswrapper[4631]: I1204 18:55:33.324197 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvm2p\" (UniqueName: \"kubernetes.io/projected/2024b526-fb71-4744-be30-145d9d6b596d-kube-api-access-zvm2p\") pod \"redhat-operators-q7w4q\" (UID: \"2024b526-fb71-4744-be30-145d9d6b596d\") " pod="openshift-marketplace/redhat-operators-q7w4q" Dec 04 18:55:33 crc kubenswrapper[4631]: I1204 18:55:33.324610 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2024b526-fb71-4744-be30-145d9d6b596d-catalog-content\") pod \"redhat-operators-q7w4q\" (UID: \"2024b526-fb71-4744-be30-145d9d6b596d\") " pod="openshift-marketplace/redhat-operators-q7w4q" Dec 04 18:55:33 crc kubenswrapper[4631]: I1204 18:55:33.324641 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2024b526-fb71-4744-be30-145d9d6b596d-utilities\") pod \"redhat-operators-q7w4q\" (UID: \"2024b526-fb71-4744-be30-145d9d6b596d\") " pod="openshift-marketplace/redhat-operators-q7w4q" Dec 04 18:55:33 crc kubenswrapper[4631]: I1204 18:55:33.425940 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2024b526-fb71-4744-be30-145d9d6b596d-catalog-content\") pod \"redhat-operators-q7w4q\" (UID: \"2024b526-fb71-4744-be30-145d9d6b596d\") " pod="openshift-marketplace/redhat-operators-q7w4q" Dec 04 18:55:33 crc kubenswrapper[4631]: I1204 18:55:33.425988 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2024b526-fb71-4744-be30-145d9d6b596d-utilities\") pod \"redhat-operators-q7w4q\" (UID: \"2024b526-fb71-4744-be30-145d9d6b596d\") " pod="openshift-marketplace/redhat-operators-q7w4q" Dec 04 18:55:33 crc kubenswrapper[4631]: I1204 18:55:33.426119 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvm2p\" (UniqueName: \"kubernetes.io/projected/2024b526-fb71-4744-be30-145d9d6b596d-kube-api-access-zvm2p\") pod \"redhat-operators-q7w4q\" (UID: \"2024b526-fb71-4744-be30-145d9d6b596d\") " pod="openshift-marketplace/redhat-operators-q7w4q" Dec 04 18:55:33 crc kubenswrapper[4631]: I1204 18:55:33.426652 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2024b526-fb71-4744-be30-145d9d6b596d-catalog-content\") pod \"redhat-operators-q7w4q\" (UID: \"2024b526-fb71-4744-be30-145d9d6b596d\") " pod="openshift-marketplace/redhat-operators-q7w4q" Dec 04 18:55:33 crc kubenswrapper[4631]: I1204 18:55:33.426677 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2024b526-fb71-4744-be30-145d9d6b596d-utilities\") pod \"redhat-operators-q7w4q\" (UID: \"2024b526-fb71-4744-be30-145d9d6b596d\") " pod="openshift-marketplace/redhat-operators-q7w4q" Dec 04 18:55:33 crc kubenswrapper[4631]: I1204 18:55:33.447279 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-zvm2p\" (UniqueName: \"kubernetes.io/projected/2024b526-fb71-4744-be30-145d9d6b596d-kube-api-access-zvm2p\") pod \"redhat-operators-q7w4q\" (UID: \"2024b526-fb71-4744-be30-145d9d6b596d\") " pod="openshift-marketplace/redhat-operators-q7w4q" Dec 04 18:55:33 crc kubenswrapper[4631]: I1204 18:55:33.568765 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q7w4q" Dec 04 18:55:34 crc kubenswrapper[4631]: I1204 18:55:34.089120 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q7w4q"] Dec 04 18:55:34 crc kubenswrapper[4631]: I1204 18:55:34.809023 4631 generic.go:334] "Generic (PLEG): container finished" podID="2024b526-fb71-4744-be30-145d9d6b596d" containerID="03fbcb3654a848e9bbfb4fedaba773f76c0f21f874a82fde1e4c4988f45a7b0a" exitCode=0 Dec 04 18:55:34 crc kubenswrapper[4631]: I1204 18:55:34.809274 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q7w4q" event={"ID":"2024b526-fb71-4744-be30-145d9d6b596d","Type":"ContainerDied","Data":"03fbcb3654a848e9bbfb4fedaba773f76c0f21f874a82fde1e4c4988f45a7b0a"} Dec 04 18:55:34 crc kubenswrapper[4631]: I1204 18:55:34.809300 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q7w4q" event={"ID":"2024b526-fb71-4744-be30-145d9d6b596d","Type":"ContainerStarted","Data":"35c7a6d0c941f2628dd8b6d96319aacac4a18335684c98e598709793b2ffc30a"} Dec 04 18:55:34 crc kubenswrapper[4631]: I1204 18:55:34.810852 4631 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 18:55:35 crc kubenswrapper[4631]: I1204 18:55:35.819624 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q7w4q" event={"ID":"2024b526-fb71-4744-be30-145d9d6b596d","Type":"ContainerStarted","Data":"f914a5ef89ecb95413b3d757e13eda2ba5187b8ccf18cedb309ebafb47ac8dcf"} Dec 04 18:55:38 crc kubenswrapper[4631]: I1204 18:55:38.874112 4631 generic.go:334] "Generic (PLEG): container finished" podID="2024b526-fb71-4744-be30-145d9d6b596d" containerID="f914a5ef89ecb95413b3d757e13eda2ba5187b8ccf18cedb309ebafb47ac8dcf" exitCode=0 Dec 04 18:55:38 crc kubenswrapper[4631]: I1204 18:55:38.874681 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q7w4q" event={"ID":"2024b526-fb71-4744-be30-145d9d6b596d","Type":"ContainerDied","Data":"f914a5ef89ecb95413b3d757e13eda2ba5187b8ccf18cedb309ebafb47ac8dcf"} Dec 04 18:55:39 crc kubenswrapper[4631]: I1204 18:55:39.885438 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q7w4q" event={"ID":"2024b526-fb71-4744-be30-145d9d6b596d","Type":"ContainerStarted","Data":"7881a0054b80fcd159050da691f9fffc29a340c4d44c282ce38a0a068e922b86"} Dec 04 18:55:39 crc kubenswrapper[4631]: I1204 18:55:39.908318 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-q7w4q" podStartSLOduration=2.056450522 podStartE2EDuration="6.908300321s" podCreationTimestamp="2025-12-04 18:55:33 +0000 UTC" firstStartedPulling="2025-12-04 18:55:34.810670365 +0000 UTC m=+5264.842912363" lastFinishedPulling="2025-12-04 18:55:39.662520164 +0000 UTC m=+5269.694762162" observedRunningTime="2025-12-04 18:55:39.907465068 +0000 UTC m=+5269.939707086" watchObservedRunningTime="2025-12-04 18:55:39.908300321 +0000 UTC m=+5269.940542319" Dec 04 18:55:41 crc 
kubenswrapper[4631]: I1204 18:55:41.239281 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:55:41 crc kubenswrapper[4631]: E1204 18:55:41.240030 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:55:43 crc kubenswrapper[4631]: I1204 18:55:43.569815 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-q7w4q" Dec 04 18:55:43 crc kubenswrapper[4631]: I1204 18:55:43.570064 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-q7w4q" Dec 04 18:55:44 crc kubenswrapper[4631]: I1204 18:55:44.671020 4631 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-q7w4q" podUID="2024b526-fb71-4744-be30-145d9d6b596d" containerName="registry-server" probeResult="failure" output=< Dec 04 18:55:44 crc kubenswrapper[4631]: timeout: failed to connect service ":50051" within 1s Dec 04 18:55:44 crc kubenswrapper[4631]: > Dec 04 18:55:52 crc kubenswrapper[4631]: I1204 18:55:52.240362 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:55:52 crc kubenswrapper[4631]: E1204 18:55:52.241053 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:55:53 crc kubenswrapper[4631]: I1204 18:55:53.631094 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-q7w4q" Dec 04 18:55:53 crc kubenswrapper[4631]: I1204 18:55:53.693191 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-q7w4q" Dec 04 18:55:53 crc kubenswrapper[4631]: I1204 18:55:53.892777 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-q7w4q"] Dec 04 18:55:55 crc kubenswrapper[4631]: I1204 18:55:55.027207 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-q7w4q" podUID="2024b526-fb71-4744-be30-145d9d6b596d" containerName="registry-server" containerID="cri-o://7881a0054b80fcd159050da691f9fffc29a340c4d44c282ce38a0a068e922b86" gracePeriod=2 Dec 04 18:55:55 crc kubenswrapper[4631]: I1204 18:55:55.474817 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-q7w4q" Dec 04 18:55:55 crc kubenswrapper[4631]: I1204 18:55:55.533303 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvm2p\" (UniqueName: \"kubernetes.io/projected/2024b526-fb71-4744-be30-145d9d6b596d-kube-api-access-zvm2p\") pod \"2024b526-fb71-4744-be30-145d9d6b596d\" (UID: \"2024b526-fb71-4744-be30-145d9d6b596d\") " Dec 04 18:55:55 crc kubenswrapper[4631]: I1204 18:55:55.533470 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2024b526-fb71-4744-be30-145d9d6b596d-utilities\") pod \"2024b526-fb71-4744-be30-145d9d6b596d\" (UID: \"2024b526-fb71-4744-be30-145d9d6b596d\") " Dec 04 18:55:55 crc kubenswrapper[4631]: I1204 18:55:55.533548 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2024b526-fb71-4744-be30-145d9d6b596d-catalog-content\") pod \"2024b526-fb71-4744-be30-145d9d6b596d\" (UID: \"2024b526-fb71-4744-be30-145d9d6b596d\") " Dec 04 18:55:55 crc kubenswrapper[4631]: I1204 18:55:55.535261 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2024b526-fb71-4744-be30-145d9d6b596d-utilities" (OuterVolumeSpecName: "utilities") pod "2024b526-fb71-4744-be30-145d9d6b596d" (UID: "2024b526-fb71-4744-be30-145d9d6b596d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:55:55 crc kubenswrapper[4631]: I1204 18:55:55.547609 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2024b526-fb71-4744-be30-145d9d6b596d-kube-api-access-zvm2p" (OuterVolumeSpecName: "kube-api-access-zvm2p") pod "2024b526-fb71-4744-be30-145d9d6b596d" (UID: "2024b526-fb71-4744-be30-145d9d6b596d"). InnerVolumeSpecName "kube-api-access-zvm2p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:55:55 crc kubenswrapper[4631]: I1204 18:55:55.635861 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2024b526-fb71-4744-be30-145d9d6b596d-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 18:55:55 crc kubenswrapper[4631]: I1204 18:55:55.636147 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvm2p\" (UniqueName: \"kubernetes.io/projected/2024b526-fb71-4744-be30-145d9d6b596d-kube-api-access-zvm2p\") on node \"crc\" DevicePath \"\"" Dec 04 18:55:55 crc kubenswrapper[4631]: I1204 18:55:55.641122 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2024b526-fb71-4744-be30-145d9d6b596d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2024b526-fb71-4744-be30-145d9d6b596d" (UID: "2024b526-fb71-4744-be30-145d9d6b596d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:55:55 crc kubenswrapper[4631]: I1204 18:55:55.737549 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2024b526-fb71-4744-be30-145d9d6b596d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 18:55:56 crc kubenswrapper[4631]: I1204 18:55:56.048964 4631 generic.go:334] "Generic (PLEG): container finished" podID="2024b526-fb71-4744-be30-145d9d6b596d" containerID="7881a0054b80fcd159050da691f9fffc29a340c4d44c282ce38a0a068e922b86" exitCode=0 Dec 04 18:55:56 crc kubenswrapper[4631]: I1204 18:55:56.049078 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q7w4q" Dec 04 18:55:56 crc kubenswrapper[4631]: I1204 18:55:56.049096 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q7w4q" event={"ID":"2024b526-fb71-4744-be30-145d9d6b596d","Type":"ContainerDied","Data":"7881a0054b80fcd159050da691f9fffc29a340c4d44c282ce38a0a068e922b86"} Dec 04 18:55:56 crc kubenswrapper[4631]: I1204 18:55:56.051439 4631 scope.go:117] "RemoveContainer" containerID="7881a0054b80fcd159050da691f9fffc29a340c4d44c282ce38a0a068e922b86" Dec 04 18:55:56 crc kubenswrapper[4631]: I1204 18:55:56.051276 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q7w4q" event={"ID":"2024b526-fb71-4744-be30-145d9d6b596d","Type":"ContainerDied","Data":"35c7a6d0c941f2628dd8b6d96319aacac4a18335684c98e598709793b2ffc30a"} Dec 04 18:55:56 crc kubenswrapper[4631]: I1204 18:55:56.088031 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-q7w4q"] Dec 04 18:55:56 crc kubenswrapper[4631]: I1204 18:55:56.095861 4631 scope.go:117] "RemoveContainer" containerID="f914a5ef89ecb95413b3d757e13eda2ba5187b8ccf18cedb309ebafb47ac8dcf" Dec 04 18:55:56 crc kubenswrapper[4631]: I1204 18:55:56.101900 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-q7w4q"] Dec 04 18:55:56 crc kubenswrapper[4631]: I1204 18:55:56.116614 4631 scope.go:117] "RemoveContainer" containerID="03fbcb3654a848e9bbfb4fedaba773f76c0f21f874a82fde1e4c4988f45a7b0a" Dec 04 18:55:56 crc kubenswrapper[4631]: I1204 18:55:56.166247 4631 scope.go:117] "RemoveContainer" containerID="7881a0054b80fcd159050da691f9fffc29a340c4d44c282ce38a0a068e922b86" Dec 04 18:55:56 crc kubenswrapper[4631]: E1204 18:55:56.166843 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7881a0054b80fcd159050da691f9fffc29a340c4d44c282ce38a0a068e922b86\": container with ID starting with 7881a0054b80fcd159050da691f9fffc29a340c4d44c282ce38a0a068e922b86 not found: ID does not exist" containerID="7881a0054b80fcd159050da691f9fffc29a340c4d44c282ce38a0a068e922b86" Dec 04 18:55:56 crc kubenswrapper[4631]: I1204 18:55:56.166891 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7881a0054b80fcd159050da691f9fffc29a340c4d44c282ce38a0a068e922b86"} err="failed to get container status \"7881a0054b80fcd159050da691f9fffc29a340c4d44c282ce38a0a068e922b86\": rpc error: code = NotFound desc = could not find container \"7881a0054b80fcd159050da691f9fffc29a340c4d44c282ce38a0a068e922b86\": container with ID starting with 7881a0054b80fcd159050da691f9fffc29a340c4d44c282ce38a0a068e922b86 not found: ID does not exist" Dec 04 18:55:56 crc 
kubenswrapper[4631]: I1204 18:55:56.166924 4631 scope.go:117] "RemoveContainer" containerID="f914a5ef89ecb95413b3d757e13eda2ba5187b8ccf18cedb309ebafb47ac8dcf" Dec 04 18:55:56 crc kubenswrapper[4631]: E1204 18:55:56.167324 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f914a5ef89ecb95413b3d757e13eda2ba5187b8ccf18cedb309ebafb47ac8dcf\": container with ID starting with f914a5ef89ecb95413b3d757e13eda2ba5187b8ccf18cedb309ebafb47ac8dcf not found: ID does not exist" containerID="f914a5ef89ecb95413b3d757e13eda2ba5187b8ccf18cedb309ebafb47ac8dcf" Dec 04 18:55:56 crc kubenswrapper[4631]: I1204 18:55:56.167345 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f914a5ef89ecb95413b3d757e13eda2ba5187b8ccf18cedb309ebafb47ac8dcf"} err="failed to get container status \"f914a5ef89ecb95413b3d757e13eda2ba5187b8ccf18cedb309ebafb47ac8dcf\": rpc error: code = NotFound desc = could not find container \"f914a5ef89ecb95413b3d757e13eda2ba5187b8ccf18cedb309ebafb47ac8dcf\": container with ID starting with f914a5ef89ecb95413b3d757e13eda2ba5187b8ccf18cedb309ebafb47ac8dcf not found: ID does not exist" Dec 04 18:55:56 crc kubenswrapper[4631]: I1204 18:55:56.167358 4631 scope.go:117] "RemoveContainer" containerID="03fbcb3654a848e9bbfb4fedaba773f76c0f21f874a82fde1e4c4988f45a7b0a" Dec 04 18:55:56 crc kubenswrapper[4631]: E1204 18:55:56.167637 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03fbcb3654a848e9bbfb4fedaba773f76c0f21f874a82fde1e4c4988f45a7b0a\": container with ID starting with 03fbcb3654a848e9bbfb4fedaba773f76c0f21f874a82fde1e4c4988f45a7b0a not found: ID does not exist" containerID="03fbcb3654a848e9bbfb4fedaba773f76c0f21f874a82fde1e4c4988f45a7b0a" Dec 04 18:55:56 crc kubenswrapper[4631]: I1204 18:55:56.167664 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03fbcb3654a848e9bbfb4fedaba773f76c0f21f874a82fde1e4c4988f45a7b0a"} err="failed to get container status \"03fbcb3654a848e9bbfb4fedaba773f76c0f21f874a82fde1e4c4988f45a7b0a\": rpc error: code = NotFound desc = could not find container \"03fbcb3654a848e9bbfb4fedaba773f76c0f21f874a82fde1e4c4988f45a7b0a\": container with ID starting with 03fbcb3654a848e9bbfb4fedaba773f76c0f21f874a82fde1e4c4988f45a7b0a not found: ID does not exist" Dec 04 18:55:56 crc kubenswrapper[4631]: I1204 18:55:56.250327 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2024b526-fb71-4744-be30-145d9d6b596d" path="/var/lib/kubelet/pods/2024b526-fb71-4744-be30-145d9d6b596d/volumes" Dec 04 18:56:04 crc kubenswrapper[4631]: I1204 18:56:04.240752 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:56:04 crc kubenswrapper[4631]: E1204 18:56:04.242590 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:56:17 crc kubenswrapper[4631]: I1204 18:56:17.239539 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" 
Dec 04 18:56:17 crc kubenswrapper[4631]: E1204 18:56:17.240237 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:56:24 crc kubenswrapper[4631]: I1204 18:56:24.494833 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-854xp"] Dec 04 18:56:24 crc kubenswrapper[4631]: E1204 18:56:24.495755 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2024b526-fb71-4744-be30-145d9d6b596d" containerName="extract-content" Dec 04 18:56:24 crc kubenswrapper[4631]: I1204 18:56:24.495767 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="2024b526-fb71-4744-be30-145d9d6b596d" containerName="extract-content" Dec 04 18:56:24 crc kubenswrapper[4631]: E1204 18:56:24.495804 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2024b526-fb71-4744-be30-145d9d6b596d" containerName="registry-server" Dec 04 18:56:24 crc kubenswrapper[4631]: I1204 18:56:24.495810 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="2024b526-fb71-4744-be30-145d9d6b596d" containerName="registry-server" Dec 04 18:56:24 crc kubenswrapper[4631]: E1204 18:56:24.495830 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2024b526-fb71-4744-be30-145d9d6b596d" containerName="extract-utilities" Dec 04 18:56:24 crc kubenswrapper[4631]: I1204 18:56:24.495837 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="2024b526-fb71-4744-be30-145d9d6b596d" containerName="extract-utilities" Dec 04 18:56:24 crc kubenswrapper[4631]: I1204 18:56:24.496029 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="2024b526-fb71-4744-be30-145d9d6b596d" containerName="registry-server" Dec 04 18:56:24 crc kubenswrapper[4631]: I1204 18:56:24.497332 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-854xp" Dec 04 18:56:24 crc kubenswrapper[4631]: I1204 18:56:24.501431 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a2d5e15-e63d-4d20-8ad2-5d51245de9fb-utilities\") pod \"certified-operators-854xp\" (UID: \"6a2d5e15-e63d-4d20-8ad2-5d51245de9fb\") " pod="openshift-marketplace/certified-operators-854xp" Dec 04 18:56:24 crc kubenswrapper[4631]: I1204 18:56:24.501599 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a2d5e15-e63d-4d20-8ad2-5d51245de9fb-catalog-content\") pod \"certified-operators-854xp\" (UID: \"6a2d5e15-e63d-4d20-8ad2-5d51245de9fb\") " pod="openshift-marketplace/certified-operators-854xp" Dec 04 18:56:24 crc kubenswrapper[4631]: I1204 18:56:24.501678 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxvrw\" (UniqueName: \"kubernetes.io/projected/6a2d5e15-e63d-4d20-8ad2-5d51245de9fb-kube-api-access-pxvrw\") pod \"certified-operators-854xp\" (UID: \"6a2d5e15-e63d-4d20-8ad2-5d51245de9fb\") " pod="openshift-marketplace/certified-operators-854xp" Dec 04 18:56:24 crc kubenswrapper[4631]: I1204 18:56:24.518883 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-854xp"] Dec 04 18:56:24 crc kubenswrapper[4631]: I1204 18:56:24.603305 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a2d5e15-e63d-4d20-8ad2-5d51245de9fb-utilities\") pod \"certified-operators-854xp\" (UID: \"6a2d5e15-e63d-4d20-8ad2-5d51245de9fb\") " pod="openshift-marketplace/certified-operators-854xp" Dec 04 18:56:24 crc kubenswrapper[4631]: I1204 18:56:24.603506 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a2d5e15-e63d-4d20-8ad2-5d51245de9fb-catalog-content\") pod \"certified-operators-854xp\" (UID: \"6a2d5e15-e63d-4d20-8ad2-5d51245de9fb\") " pod="openshift-marketplace/certified-operators-854xp" Dec 04 18:56:24 crc kubenswrapper[4631]: I1204 18:56:24.603545 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxvrw\" (UniqueName: \"kubernetes.io/projected/6a2d5e15-e63d-4d20-8ad2-5d51245de9fb-kube-api-access-pxvrw\") pod \"certified-operators-854xp\" (UID: \"6a2d5e15-e63d-4d20-8ad2-5d51245de9fb\") " pod="openshift-marketplace/certified-operators-854xp" Dec 04 18:56:24 crc kubenswrapper[4631]: I1204 18:56:24.603982 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a2d5e15-e63d-4d20-8ad2-5d51245de9fb-catalog-content\") pod \"certified-operators-854xp\" (UID: \"6a2d5e15-e63d-4d20-8ad2-5d51245de9fb\") " pod="openshift-marketplace/certified-operators-854xp" Dec 04 18:56:24 crc kubenswrapper[4631]: I1204 18:56:24.604093 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a2d5e15-e63d-4d20-8ad2-5d51245de9fb-utilities\") pod \"certified-operators-854xp\" (UID: \"6a2d5e15-e63d-4d20-8ad2-5d51245de9fb\") " pod="openshift-marketplace/certified-operators-854xp" Dec 04 18:56:24 crc kubenswrapper[4631]: I1204 18:56:24.626107 4631 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-pxvrw\" (UniqueName: \"kubernetes.io/projected/6a2d5e15-e63d-4d20-8ad2-5d51245de9fb-kube-api-access-pxvrw\") pod \"certified-operators-854xp\" (UID: \"6a2d5e15-e63d-4d20-8ad2-5d51245de9fb\") " pod="openshift-marketplace/certified-operators-854xp" Dec 04 18:56:24 crc kubenswrapper[4631]: I1204 18:56:24.816120 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-854xp" Dec 04 18:56:25 crc kubenswrapper[4631]: I1204 18:56:25.352878 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-854xp"] Dec 04 18:56:26 crc kubenswrapper[4631]: I1204 18:56:26.328206 4631 generic.go:334] "Generic (PLEG): container finished" podID="6a2d5e15-e63d-4d20-8ad2-5d51245de9fb" containerID="d3c527f57bc819c59ea2cbd6613b5d699ff2b1772e8d0b4c5363b85d608e7e25" exitCode=0 Dec 04 18:56:26 crc kubenswrapper[4631]: I1204 18:56:26.328352 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-854xp" event={"ID":"6a2d5e15-e63d-4d20-8ad2-5d51245de9fb","Type":"ContainerDied","Data":"d3c527f57bc819c59ea2cbd6613b5d699ff2b1772e8d0b4c5363b85d608e7e25"} Dec 04 18:56:26 crc kubenswrapper[4631]: I1204 18:56:26.328544 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-854xp" event={"ID":"6a2d5e15-e63d-4d20-8ad2-5d51245de9fb","Type":"ContainerStarted","Data":"11a0996a2cb42376a4e405c14a8f288ba38a0b3d1778243fc679fd43034cb04e"} Dec 04 18:56:27 crc kubenswrapper[4631]: I1204 18:56:27.336749 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-854xp" event={"ID":"6a2d5e15-e63d-4d20-8ad2-5d51245de9fb","Type":"ContainerStarted","Data":"a801b2bfd9649a14f11123475195956b148a91d4801c3f24078c57601423d2c4"} Dec 04 18:56:28 crc kubenswrapper[4631]: I1204 18:56:28.349641 4631 generic.go:334] "Generic (PLEG): container finished" podID="6a2d5e15-e63d-4d20-8ad2-5d51245de9fb" containerID="a801b2bfd9649a14f11123475195956b148a91d4801c3f24078c57601423d2c4" exitCode=0 Dec 04 18:56:28 crc kubenswrapper[4631]: I1204 18:56:28.349698 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-854xp" event={"ID":"6a2d5e15-e63d-4d20-8ad2-5d51245de9fb","Type":"ContainerDied","Data":"a801b2bfd9649a14f11123475195956b148a91d4801c3f24078c57601423d2c4"} Dec 04 18:56:29 crc kubenswrapper[4631]: I1204 18:56:29.360866 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-854xp" event={"ID":"6a2d5e15-e63d-4d20-8ad2-5d51245de9fb","Type":"ContainerStarted","Data":"36201e61b1dfb9c9c764c0dd09822801350029acdbedeccb5e5145ed01577e89"} Dec 04 18:56:29 crc kubenswrapper[4631]: I1204 18:56:29.385130 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-854xp" podStartSLOduration=2.96845955 podStartE2EDuration="5.385114963s" podCreationTimestamp="2025-12-04 18:56:24 +0000 UTC" firstStartedPulling="2025-12-04 18:56:26.329995626 +0000 UTC m=+5316.362237624" lastFinishedPulling="2025-12-04 18:56:28.746651039 +0000 UTC m=+5318.778893037" observedRunningTime="2025-12-04 18:56:29.379225536 +0000 UTC m=+5319.411467534" watchObservedRunningTime="2025-12-04 18:56:29.385114963 +0000 UTC m=+5319.417356961" Dec 04 18:56:30 crc kubenswrapper[4631]: I1204 18:56:30.239117 4631 scope.go:117] "RemoveContainer" 
containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:56:30 crc kubenswrapper[4631]: E1204 18:56:30.239403 4631 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q27wh_openshift-machine-config-operator(fc938ac1-b2a3-4435-bda5-c7be66763a01)\"" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" Dec 04 18:56:34 crc kubenswrapper[4631]: I1204 18:56:34.816576 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-854xp" Dec 04 18:56:34 crc kubenswrapper[4631]: I1204 18:56:34.817083 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-854xp" Dec 04 18:56:34 crc kubenswrapper[4631]: I1204 18:56:34.862262 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-854xp" Dec 04 18:56:35 crc kubenswrapper[4631]: I1204 18:56:35.454354 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-854xp" Dec 04 18:56:35 crc kubenswrapper[4631]: I1204 18:56:35.500625 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-854xp"] Dec 04 18:56:37 crc kubenswrapper[4631]: I1204 18:56:37.428235 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-854xp" podUID="6a2d5e15-e63d-4d20-8ad2-5d51245de9fb" containerName="registry-server" containerID="cri-o://36201e61b1dfb9c9c764c0dd09822801350029acdbedeccb5e5145ed01577e89" gracePeriod=2 Dec 04 18:56:37 crc kubenswrapper[4631]: I1204 18:56:37.911502 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-854xp" Dec 04 18:56:37 crc kubenswrapper[4631]: I1204 18:56:37.991384 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxvrw\" (UniqueName: \"kubernetes.io/projected/6a2d5e15-e63d-4d20-8ad2-5d51245de9fb-kube-api-access-pxvrw\") pod \"6a2d5e15-e63d-4d20-8ad2-5d51245de9fb\" (UID: \"6a2d5e15-e63d-4d20-8ad2-5d51245de9fb\") " Dec 04 18:56:37 crc kubenswrapper[4631]: I1204 18:56:37.991520 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a2d5e15-e63d-4d20-8ad2-5d51245de9fb-utilities\") pod \"6a2d5e15-e63d-4d20-8ad2-5d51245de9fb\" (UID: \"6a2d5e15-e63d-4d20-8ad2-5d51245de9fb\") " Dec 04 18:56:37 crc kubenswrapper[4631]: I1204 18:56:37.991544 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a2d5e15-e63d-4d20-8ad2-5d51245de9fb-catalog-content\") pod \"6a2d5e15-e63d-4d20-8ad2-5d51245de9fb\" (UID: \"6a2d5e15-e63d-4d20-8ad2-5d51245de9fb\") " Dec 04 18:56:37 crc kubenswrapper[4631]: I1204 18:56:37.992769 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a2d5e15-e63d-4d20-8ad2-5d51245de9fb-utilities" (OuterVolumeSpecName: "utilities") pod "6a2d5e15-e63d-4d20-8ad2-5d51245de9fb" (UID: "6a2d5e15-e63d-4d20-8ad2-5d51245de9fb"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:56:37 crc kubenswrapper[4631]: I1204 18:56:37.997040 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a2d5e15-e63d-4d20-8ad2-5d51245de9fb-kube-api-access-pxvrw" (OuterVolumeSpecName: "kube-api-access-pxvrw") pod "6a2d5e15-e63d-4d20-8ad2-5d51245de9fb" (UID: "6a2d5e15-e63d-4d20-8ad2-5d51245de9fb"). InnerVolumeSpecName "kube-api-access-pxvrw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:56:38 crc kubenswrapper[4631]: I1204 18:56:38.034871 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a2d5e15-e63d-4d20-8ad2-5d51245de9fb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6a2d5e15-e63d-4d20-8ad2-5d51245de9fb" (UID: "6a2d5e15-e63d-4d20-8ad2-5d51245de9fb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:56:38 crc kubenswrapper[4631]: I1204 18:56:38.093637 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxvrw\" (UniqueName: \"kubernetes.io/projected/6a2d5e15-e63d-4d20-8ad2-5d51245de9fb-kube-api-access-pxvrw\") on node \"crc\" DevicePath \"\"" Dec 04 18:56:38 crc kubenswrapper[4631]: I1204 18:56:38.093834 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a2d5e15-e63d-4d20-8ad2-5d51245de9fb-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 18:56:38 crc kubenswrapper[4631]: I1204 18:56:38.093926 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a2d5e15-e63d-4d20-8ad2-5d51245de9fb-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 18:56:38 crc kubenswrapper[4631]: I1204 18:56:38.438382 4631 generic.go:334] "Generic (PLEG): container finished" podID="6a2d5e15-e63d-4d20-8ad2-5d51245de9fb" containerID="36201e61b1dfb9c9c764c0dd09822801350029acdbedeccb5e5145ed01577e89" exitCode=0 Dec 04 18:56:38 crc kubenswrapper[4631]: I1204 18:56:38.438392 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-854xp" event={"ID":"6a2d5e15-e63d-4d20-8ad2-5d51245de9fb","Type":"ContainerDied","Data":"36201e61b1dfb9c9c764c0dd09822801350029acdbedeccb5e5145ed01577e89"} Dec 04 18:56:38 crc kubenswrapper[4631]: I1204 18:56:38.438706 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-854xp" event={"ID":"6a2d5e15-e63d-4d20-8ad2-5d51245de9fb","Type":"ContainerDied","Data":"11a0996a2cb42376a4e405c14a8f288ba38a0b3d1778243fc679fd43034cb04e"} Dec 04 18:56:38 crc kubenswrapper[4631]: I1204 18:56:38.438469 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-854xp" Dec 04 18:56:38 crc kubenswrapper[4631]: I1204 18:56:38.438734 4631 scope.go:117] "RemoveContainer" containerID="36201e61b1dfb9c9c764c0dd09822801350029acdbedeccb5e5145ed01577e89" Dec 04 18:56:38 crc kubenswrapper[4631]: I1204 18:56:38.461808 4631 scope.go:117] "RemoveContainer" containerID="a801b2bfd9649a14f11123475195956b148a91d4801c3f24078c57601423d2c4" Dec 04 18:56:38 crc kubenswrapper[4631]: I1204 18:56:38.477019 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-854xp"] Dec 04 18:56:38 crc kubenswrapper[4631]: I1204 18:56:38.490873 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-854xp"] Dec 04 18:56:38 crc kubenswrapper[4631]: I1204 18:56:38.499006 4631 scope.go:117] "RemoveContainer" containerID="d3c527f57bc819c59ea2cbd6613b5d699ff2b1772e8d0b4c5363b85d608e7e25" Dec 04 18:56:38 crc kubenswrapper[4631]: I1204 18:56:38.540127 4631 scope.go:117] "RemoveContainer" containerID="36201e61b1dfb9c9c764c0dd09822801350029acdbedeccb5e5145ed01577e89" Dec 04 18:56:38 crc kubenswrapper[4631]: E1204 18:56:38.540605 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36201e61b1dfb9c9c764c0dd09822801350029acdbedeccb5e5145ed01577e89\": container with ID starting with 36201e61b1dfb9c9c764c0dd09822801350029acdbedeccb5e5145ed01577e89 not found: ID does not exist" containerID="36201e61b1dfb9c9c764c0dd09822801350029acdbedeccb5e5145ed01577e89" Dec 04 18:56:38 crc kubenswrapper[4631]: I1204 18:56:38.540638 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36201e61b1dfb9c9c764c0dd09822801350029acdbedeccb5e5145ed01577e89"} err="failed to get container status \"36201e61b1dfb9c9c764c0dd09822801350029acdbedeccb5e5145ed01577e89\": rpc error: code = NotFound desc = could not find container \"36201e61b1dfb9c9c764c0dd09822801350029acdbedeccb5e5145ed01577e89\": container with ID starting with 36201e61b1dfb9c9c764c0dd09822801350029acdbedeccb5e5145ed01577e89 not found: ID does not exist" Dec 04 18:56:38 crc kubenswrapper[4631]: I1204 18:56:38.540661 4631 scope.go:117] "RemoveContainer" containerID="a801b2bfd9649a14f11123475195956b148a91d4801c3f24078c57601423d2c4" Dec 04 18:56:38 crc kubenswrapper[4631]: E1204 18:56:38.541042 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a801b2bfd9649a14f11123475195956b148a91d4801c3f24078c57601423d2c4\": container with ID starting with a801b2bfd9649a14f11123475195956b148a91d4801c3f24078c57601423d2c4 not found: ID does not exist" containerID="a801b2bfd9649a14f11123475195956b148a91d4801c3f24078c57601423d2c4" Dec 04 18:56:38 crc kubenswrapper[4631]: I1204 18:56:38.541066 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a801b2bfd9649a14f11123475195956b148a91d4801c3f24078c57601423d2c4"} err="failed to get container status \"a801b2bfd9649a14f11123475195956b148a91d4801c3f24078c57601423d2c4\": rpc error: code = NotFound desc = could not find container \"a801b2bfd9649a14f11123475195956b148a91d4801c3f24078c57601423d2c4\": container with ID starting with a801b2bfd9649a14f11123475195956b148a91d4801c3f24078c57601423d2c4 not found: ID does not exist" Dec 04 18:56:38 crc kubenswrapper[4631]: I1204 18:56:38.541083 4631 scope.go:117] "RemoveContainer" 
containerID="d3c527f57bc819c59ea2cbd6613b5d699ff2b1772e8d0b4c5363b85d608e7e25" Dec 04 18:56:38 crc kubenswrapper[4631]: E1204 18:56:38.541413 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3c527f57bc819c59ea2cbd6613b5d699ff2b1772e8d0b4c5363b85d608e7e25\": container with ID starting with d3c527f57bc819c59ea2cbd6613b5d699ff2b1772e8d0b4c5363b85d608e7e25 not found: ID does not exist" containerID="d3c527f57bc819c59ea2cbd6613b5d699ff2b1772e8d0b4c5363b85d608e7e25" Dec 04 18:56:38 crc kubenswrapper[4631]: I1204 18:56:38.541466 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3c527f57bc819c59ea2cbd6613b5d699ff2b1772e8d0b4c5363b85d608e7e25"} err="failed to get container status \"d3c527f57bc819c59ea2cbd6613b5d699ff2b1772e8d0b4c5363b85d608e7e25\": rpc error: code = NotFound desc = could not find container \"d3c527f57bc819c59ea2cbd6613b5d699ff2b1772e8d0b4c5363b85d608e7e25\": container with ID starting with d3c527f57bc819c59ea2cbd6613b5d699ff2b1772e8d0b4c5363b85d608e7e25 not found: ID does not exist" Dec 04 18:56:40 crc kubenswrapper[4631]: I1204 18:56:40.251700 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a2d5e15-e63d-4d20-8ad2-5d51245de9fb" path="/var/lib/kubelet/pods/6a2d5e15-e63d-4d20-8ad2-5d51245de9fb/volumes" Dec 04 18:56:41 crc kubenswrapper[4631]: I1204 18:56:41.467613 4631 generic.go:334] "Generic (PLEG): container finished" podID="be06e218-e114-444c-9682-dd7d8a0feb28" containerID="4a7a003e274ba374eefb0f52d41119e54bab408aa51ff86d9ee9911ad861b7a4" exitCode=0 Dec 04 18:56:41 crc kubenswrapper[4631]: I1204 18:56:41.467654 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wvsck/must-gather-5htd9" event={"ID":"be06e218-e114-444c-9682-dd7d8a0feb28","Type":"ContainerDied","Data":"4a7a003e274ba374eefb0f52d41119e54bab408aa51ff86d9ee9911ad861b7a4"} Dec 04 18:56:41 crc kubenswrapper[4631]: I1204 18:56:41.468176 4631 scope.go:117] "RemoveContainer" containerID="4a7a003e274ba374eefb0f52d41119e54bab408aa51ff86d9ee9911ad861b7a4" Dec 04 18:56:41 crc kubenswrapper[4631]: I1204 18:56:41.796876 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wvsck_must-gather-5htd9_be06e218-e114-444c-9682-dd7d8a0feb28/gather/0.log" Dec 04 18:56:44 crc kubenswrapper[4631]: I1204 18:56:44.243480 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 18:56:44 crc kubenswrapper[4631]: I1204 18:56:44.511477 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerStarted","Data":"7c0ecddc840f759bcfc606f605243b760d75032e9ed362021a89b368cf4a2018"} Dec 04 18:56:54 crc kubenswrapper[4631]: I1204 18:56:54.486715 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wvsck/must-gather-5htd9"] Dec 04 18:56:54 crc kubenswrapper[4631]: I1204 18:56:54.487461 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-wvsck/must-gather-5htd9" podUID="be06e218-e114-444c-9682-dd7d8a0feb28" containerName="copy" containerID="cri-o://396ea6ec20b0f113f8640333c0ace13284a2d5889e081f59f9abe179d1992864" gracePeriod=2 Dec 04 18:56:54 crc kubenswrapper[4631]: I1204 18:56:54.547298 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-must-gather-wvsck/must-gather-5htd9"] Dec 04 18:56:54 crc kubenswrapper[4631]: I1204 18:56:54.944583 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wvsck_must-gather-5htd9_be06e218-e114-444c-9682-dd7d8a0feb28/copy/0.log" Dec 04 18:56:54 crc kubenswrapper[4631]: I1204 18:56:54.945496 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wvsck/must-gather-5htd9" Dec 04 18:56:54 crc kubenswrapper[4631]: I1204 18:56:54.996142 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d8t2h\" (UniqueName: \"kubernetes.io/projected/be06e218-e114-444c-9682-dd7d8a0feb28-kube-api-access-d8t2h\") pod \"be06e218-e114-444c-9682-dd7d8a0feb28\" (UID: \"be06e218-e114-444c-9682-dd7d8a0feb28\") " Dec 04 18:56:54 crc kubenswrapper[4631]: I1204 18:56:54.996305 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/be06e218-e114-444c-9682-dd7d8a0feb28-must-gather-output\") pod \"be06e218-e114-444c-9682-dd7d8a0feb28\" (UID: \"be06e218-e114-444c-9682-dd7d8a0feb28\") " Dec 04 18:56:55 crc kubenswrapper[4631]: I1204 18:56:55.003592 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be06e218-e114-444c-9682-dd7d8a0feb28-kube-api-access-d8t2h" (OuterVolumeSpecName: "kube-api-access-d8t2h") pod "be06e218-e114-444c-9682-dd7d8a0feb28" (UID: "be06e218-e114-444c-9682-dd7d8a0feb28"). InnerVolumeSpecName "kube-api-access-d8t2h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 18:56:55 crc kubenswrapper[4631]: I1204 18:56:55.098930 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8t2h\" (UniqueName: \"kubernetes.io/projected/be06e218-e114-444c-9682-dd7d8a0feb28-kube-api-access-d8t2h\") on node \"crc\" DevicePath \"\"" Dec 04 18:56:55 crc kubenswrapper[4631]: I1204 18:56:55.156412 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be06e218-e114-444c-9682-dd7d8a0feb28-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "be06e218-e114-444c-9682-dd7d8a0feb28" (UID: "be06e218-e114-444c-9682-dd7d8a0feb28"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 18:56:55 crc kubenswrapper[4631]: I1204 18:56:55.200593 4631 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/be06e218-e114-444c-9682-dd7d8a0feb28-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 04 18:56:55 crc kubenswrapper[4631]: I1204 18:56:55.665331 4631 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wvsck_must-gather-5htd9_be06e218-e114-444c-9682-dd7d8a0feb28/copy/0.log" Dec 04 18:56:55 crc kubenswrapper[4631]: I1204 18:56:55.668669 4631 generic.go:334] "Generic (PLEG): container finished" podID="be06e218-e114-444c-9682-dd7d8a0feb28" containerID="396ea6ec20b0f113f8640333c0ace13284a2d5889e081f59f9abe179d1992864" exitCode=143 Dec 04 18:56:55 crc kubenswrapper[4631]: I1204 18:56:55.668736 4631 scope.go:117] "RemoveContainer" containerID="396ea6ec20b0f113f8640333c0ace13284a2d5889e081f59f9abe179d1992864" Dec 04 18:56:55 crc kubenswrapper[4631]: I1204 18:56:55.668739 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wvsck/must-gather-5htd9" Dec 04 18:56:55 crc kubenswrapper[4631]: I1204 18:56:55.687008 4631 scope.go:117] "RemoveContainer" containerID="4a7a003e274ba374eefb0f52d41119e54bab408aa51ff86d9ee9911ad861b7a4" Dec 04 18:56:55 crc kubenswrapper[4631]: I1204 18:56:55.733331 4631 scope.go:117] "RemoveContainer" containerID="396ea6ec20b0f113f8640333c0ace13284a2d5889e081f59f9abe179d1992864" Dec 04 18:56:55 crc kubenswrapper[4631]: E1204 18:56:55.733934 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"396ea6ec20b0f113f8640333c0ace13284a2d5889e081f59f9abe179d1992864\": container with ID starting with 396ea6ec20b0f113f8640333c0ace13284a2d5889e081f59f9abe179d1992864 not found: ID does not exist" containerID="396ea6ec20b0f113f8640333c0ace13284a2d5889e081f59f9abe179d1992864" Dec 04 18:56:55 crc kubenswrapper[4631]: I1204 18:56:55.734023 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"396ea6ec20b0f113f8640333c0ace13284a2d5889e081f59f9abe179d1992864"} err="failed to get container status \"396ea6ec20b0f113f8640333c0ace13284a2d5889e081f59f9abe179d1992864\": rpc error: code = NotFound desc = could not find container \"396ea6ec20b0f113f8640333c0ace13284a2d5889e081f59f9abe179d1992864\": container with ID starting with 396ea6ec20b0f113f8640333c0ace13284a2d5889e081f59f9abe179d1992864 not found: ID does not exist" Dec 04 18:56:55 crc kubenswrapper[4631]: I1204 18:56:55.734091 4631 scope.go:117] "RemoveContainer" containerID="4a7a003e274ba374eefb0f52d41119e54bab408aa51ff86d9ee9911ad861b7a4" Dec 04 18:56:55 crc kubenswrapper[4631]: E1204 18:56:55.734432 4631 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a7a003e274ba374eefb0f52d41119e54bab408aa51ff86d9ee9911ad861b7a4\": container with ID starting with 4a7a003e274ba374eefb0f52d41119e54bab408aa51ff86d9ee9911ad861b7a4 not found: ID does not exist" containerID="4a7a003e274ba374eefb0f52d41119e54bab408aa51ff86d9ee9911ad861b7a4" Dec 04 18:56:55 crc kubenswrapper[4631]: I1204 18:56:55.734469 4631 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a7a003e274ba374eefb0f52d41119e54bab408aa51ff86d9ee9911ad861b7a4"} err="failed to get container status \"4a7a003e274ba374eefb0f52d41119e54bab408aa51ff86d9ee9911ad861b7a4\": rpc error: code = NotFound desc = could not find container \"4a7a003e274ba374eefb0f52d41119e54bab408aa51ff86d9ee9911ad861b7a4\": container with ID starting with 4a7a003e274ba374eefb0f52d41119e54bab408aa51ff86d9ee9911ad861b7a4 not found: ID does not exist" Dec 04 18:56:56 crc kubenswrapper[4631]: I1204 18:56:56.249733 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be06e218-e114-444c-9682-dd7d8a0feb28" path="/var/lib/kubelet/pods/be06e218-e114-444c-9682-dd7d8a0feb28/volumes" Dec 04 18:57:25 crc kubenswrapper[4631]: I1204 18:57:25.135952 4631 scope.go:117] "RemoveContainer" containerID="cf6689bab790b2867dc649dbbe8396460a547e01bb01307fa9fafe9691990fcc" Dec 04 18:58:25 crc kubenswrapper[4631]: I1204 18:58:25.233221 4631 scope.go:117] "RemoveContainer" containerID="d609e9439f5426ec3faf00cf289265b20daa7012e313d03c388765e6fa873e97" Dec 04 18:59:06 crc kubenswrapper[4631]: I1204 18:59:06.022883 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: 
Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:59:06 crc kubenswrapper[4631]: I1204 18:59:06.023440 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 18:59:36 crc kubenswrapper[4631]: I1204 18:59:36.022333 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 18:59:36 crc kubenswrapper[4631]: I1204 18:59:36.022842 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.177680 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414580-fdl2n"] Dec 04 19:00:00 crc kubenswrapper[4631]: E1204 19:00:00.178874 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be06e218-e114-444c-9682-dd7d8a0feb28" containerName="gather" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.178897 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="be06e218-e114-444c-9682-dd7d8a0feb28" containerName="gather" Dec 04 19:00:00 crc kubenswrapper[4631]: E1204 19:00:00.178925 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a2d5e15-e63d-4d20-8ad2-5d51245de9fb" containerName="extract-content" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.178939 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a2d5e15-e63d-4d20-8ad2-5d51245de9fb" containerName="extract-content" Dec 04 19:00:00 crc kubenswrapper[4631]: E1204 19:00:00.178959 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a2d5e15-e63d-4d20-8ad2-5d51245de9fb" containerName="registry-server" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.178971 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a2d5e15-e63d-4d20-8ad2-5d51245de9fb" containerName="registry-server" Dec 04 19:00:00 crc kubenswrapper[4631]: E1204 19:00:00.178999 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a2d5e15-e63d-4d20-8ad2-5d51245de9fb" containerName="extract-utilities" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.179010 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a2d5e15-e63d-4d20-8ad2-5d51245de9fb" containerName="extract-utilities" Dec 04 19:00:00 crc kubenswrapper[4631]: E1204 19:00:00.179027 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be06e218-e114-444c-9682-dd7d8a0feb28" containerName="copy" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.179037 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="be06e218-e114-444c-9682-dd7d8a0feb28" containerName="copy" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.179423 4631 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="6a2d5e15-e63d-4d20-8ad2-5d51245de9fb" containerName="registry-server" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.179443 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="be06e218-e114-444c-9682-dd7d8a0feb28" containerName="gather" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.179463 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="be06e218-e114-444c-9682-dd7d8a0feb28" containerName="copy" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.180360 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414580-fdl2n" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.183573 4631 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.183745 4631 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.197133 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414580-fdl2n"] Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.326452 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7229821e-7b60-42a9-badc-fbc07bd829e7-secret-volume\") pod \"collect-profiles-29414580-fdl2n\" (UID: \"7229821e-7b60-42a9-badc-fbc07bd829e7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414580-fdl2n" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.326503 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7229821e-7b60-42a9-badc-fbc07bd829e7-config-volume\") pod \"collect-profiles-29414580-fdl2n\" (UID: \"7229821e-7b60-42a9-badc-fbc07bd829e7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414580-fdl2n" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.326574 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55x89\" (UniqueName: \"kubernetes.io/projected/7229821e-7b60-42a9-badc-fbc07bd829e7-kube-api-access-55x89\") pod \"collect-profiles-29414580-fdl2n\" (UID: \"7229821e-7b60-42a9-badc-fbc07bd829e7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414580-fdl2n" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.431498 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7229821e-7b60-42a9-badc-fbc07bd829e7-secret-volume\") pod \"collect-profiles-29414580-fdl2n\" (UID: \"7229821e-7b60-42a9-badc-fbc07bd829e7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414580-fdl2n" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.431570 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7229821e-7b60-42a9-badc-fbc07bd829e7-config-volume\") pod \"collect-profiles-29414580-fdl2n\" (UID: \"7229821e-7b60-42a9-badc-fbc07bd829e7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414580-fdl2n" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.431659 4631 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-55x89\" (UniqueName: \"kubernetes.io/projected/7229821e-7b60-42a9-badc-fbc07bd829e7-kube-api-access-55x89\") pod \"collect-profiles-29414580-fdl2n\" (UID: \"7229821e-7b60-42a9-badc-fbc07bd829e7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414580-fdl2n" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.445407 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7229821e-7b60-42a9-badc-fbc07bd829e7-config-volume\") pod \"collect-profiles-29414580-fdl2n\" (UID: \"7229821e-7b60-42a9-badc-fbc07bd829e7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414580-fdl2n" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.455811 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7229821e-7b60-42a9-badc-fbc07bd829e7-secret-volume\") pod \"collect-profiles-29414580-fdl2n\" (UID: \"7229821e-7b60-42a9-badc-fbc07bd829e7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414580-fdl2n" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.462621 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55x89\" (UniqueName: \"kubernetes.io/projected/7229821e-7b60-42a9-badc-fbc07bd829e7-kube-api-access-55x89\") pod \"collect-profiles-29414580-fdl2n\" (UID: \"7229821e-7b60-42a9-badc-fbc07bd829e7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414580-fdl2n" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.526197 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414580-fdl2n" Dec 04 19:00:00 crc kubenswrapper[4631]: I1204 19:00:00.980002 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414580-fdl2n"] Dec 04 19:00:01 crc kubenswrapper[4631]: I1204 19:00:01.387591 4631 generic.go:334] "Generic (PLEG): container finished" podID="7229821e-7b60-42a9-badc-fbc07bd829e7" containerID="471be3198d9bf8b83fa19df38896ea99952405f0ad4ea42bee11eb3f8b76e889" exitCode=0 Dec 04 19:00:01 crc kubenswrapper[4631]: I1204 19:00:01.387658 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414580-fdl2n" event={"ID":"7229821e-7b60-42a9-badc-fbc07bd829e7","Type":"ContainerDied","Data":"471be3198d9bf8b83fa19df38896ea99952405f0ad4ea42bee11eb3f8b76e889"} Dec 04 19:00:01 crc kubenswrapper[4631]: I1204 19:00:01.388946 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414580-fdl2n" event={"ID":"7229821e-7b60-42a9-badc-fbc07bd829e7","Type":"ContainerStarted","Data":"f16872f1a7790191c46396d57e10e6200122926e707b8138b9eeadf8b9b69bc3"} Dec 04 19:00:02 crc kubenswrapper[4631]: I1204 19:00:02.730491 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414580-fdl2n" Dec 04 19:00:02 crc kubenswrapper[4631]: I1204 19:00:02.896881 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7229821e-7b60-42a9-badc-fbc07bd829e7-secret-volume\") pod \"7229821e-7b60-42a9-badc-fbc07bd829e7\" (UID: \"7229821e-7b60-42a9-badc-fbc07bd829e7\") " Dec 04 19:00:02 crc kubenswrapper[4631]: I1204 19:00:02.896992 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55x89\" (UniqueName: \"kubernetes.io/projected/7229821e-7b60-42a9-badc-fbc07bd829e7-kube-api-access-55x89\") pod \"7229821e-7b60-42a9-badc-fbc07bd829e7\" (UID: \"7229821e-7b60-42a9-badc-fbc07bd829e7\") " Dec 04 19:00:02 crc kubenswrapper[4631]: I1204 19:00:02.897154 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7229821e-7b60-42a9-badc-fbc07bd829e7-config-volume\") pod \"7229821e-7b60-42a9-badc-fbc07bd829e7\" (UID: \"7229821e-7b60-42a9-badc-fbc07bd829e7\") " Dec 04 19:00:02 crc kubenswrapper[4631]: I1204 19:00:02.898042 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7229821e-7b60-42a9-badc-fbc07bd829e7-config-volume" (OuterVolumeSpecName: "config-volume") pod "7229821e-7b60-42a9-badc-fbc07bd829e7" (UID: "7229821e-7b60-42a9-badc-fbc07bd829e7"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 04 19:00:02 crc kubenswrapper[4631]: I1204 19:00:02.902760 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7229821e-7b60-42a9-badc-fbc07bd829e7-kube-api-access-55x89" (OuterVolumeSpecName: "kube-api-access-55x89") pod "7229821e-7b60-42a9-badc-fbc07bd829e7" (UID: "7229821e-7b60-42a9-badc-fbc07bd829e7"). InnerVolumeSpecName "kube-api-access-55x89". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 19:00:02 crc kubenswrapper[4631]: I1204 19:00:02.902861 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7229821e-7b60-42a9-badc-fbc07bd829e7-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7229821e-7b60-42a9-badc-fbc07bd829e7" (UID: "7229821e-7b60-42a9-badc-fbc07bd829e7"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 04 19:00:02 crc kubenswrapper[4631]: I1204 19:00:02.999054 4631 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7229821e-7b60-42a9-badc-fbc07bd829e7-config-volume\") on node \"crc\" DevicePath \"\"" Dec 04 19:00:02 crc kubenswrapper[4631]: I1204 19:00:02.999090 4631 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7229821e-7b60-42a9-badc-fbc07bd829e7-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 04 19:00:02 crc kubenswrapper[4631]: I1204 19:00:02.999100 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55x89\" (UniqueName: \"kubernetes.io/projected/7229821e-7b60-42a9-badc-fbc07bd829e7-kube-api-access-55x89\") on node \"crc\" DevicePath \"\"" Dec 04 19:00:03 crc kubenswrapper[4631]: I1204 19:00:03.409899 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414580-fdl2n" event={"ID":"7229821e-7b60-42a9-badc-fbc07bd829e7","Type":"ContainerDied","Data":"f16872f1a7790191c46396d57e10e6200122926e707b8138b9eeadf8b9b69bc3"} Dec 04 19:00:03 crc kubenswrapper[4631]: I1204 19:00:03.410312 4631 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f16872f1a7790191c46396d57e10e6200122926e707b8138b9eeadf8b9b69bc3" Dec 04 19:00:03 crc kubenswrapper[4631]: I1204 19:00:03.410022 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414580-fdl2n" Dec 04 19:00:03 crc kubenswrapper[4631]: I1204 19:00:03.803515 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414535-fjph9"] Dec 04 19:00:03 crc kubenswrapper[4631]: I1204 19:00:03.812099 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414535-fjph9"] Dec 04 19:00:04 crc kubenswrapper[4631]: I1204 19:00:04.249778 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7e19c3d-d09f-44f8-9f35-8f30eb34904e" path="/var/lib/kubelet/pods/d7e19c3d-d09f-44f8-9f35-8f30eb34904e/volumes" Dec 04 19:00:06 crc kubenswrapper[4631]: I1204 19:00:06.022610 4631 patch_prober.go:28] interesting pod/machine-config-daemon-q27wh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 04 19:00:06 crc kubenswrapper[4631]: I1204 19:00:06.022913 4631 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 04 19:00:06 crc kubenswrapper[4631]: I1204 19:00:06.022960 4631 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" Dec 04 19:00:06 crc kubenswrapper[4631]: I1204 19:00:06.023972 4631 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7c0ecddc840f759bcfc606f605243b760d75032e9ed362021a89b368cf4a2018"} 
pod="openshift-machine-config-operator/machine-config-daemon-q27wh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 04 19:00:06 crc kubenswrapper[4631]: I1204 19:00:06.024034 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" podUID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerName="machine-config-daemon" containerID="cri-o://7c0ecddc840f759bcfc606f605243b760d75032e9ed362021a89b368cf4a2018" gracePeriod=600 Dec 04 19:00:06 crc kubenswrapper[4631]: I1204 19:00:06.438562 4631 generic.go:334] "Generic (PLEG): container finished" podID="fc938ac1-b2a3-4435-bda5-c7be66763a01" containerID="7c0ecddc840f759bcfc606f605243b760d75032e9ed362021a89b368cf4a2018" exitCode=0 Dec 04 19:00:06 crc kubenswrapper[4631]: I1204 19:00:06.438647 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerDied","Data":"7c0ecddc840f759bcfc606f605243b760d75032e9ed362021a89b368cf4a2018"} Dec 04 19:00:06 crc kubenswrapper[4631]: I1204 19:00:06.438889 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q27wh" event={"ID":"fc938ac1-b2a3-4435-bda5-c7be66763a01","Type":"ContainerStarted","Data":"9031f1ebe11eab46670c2828fcc3d46d2ded688961b1d9c5a4ee0ecee9d79091"} Dec 04 19:00:06 crc kubenswrapper[4631]: I1204 19:00:06.438911 4631 scope.go:117] "RemoveContainer" containerID="05d24717fe2b6ae5b88699e76905089a70118a3588dbbb9a7a769b0a4199d149" Dec 04 19:00:25 crc kubenswrapper[4631]: I1204 19:00:25.320022 4631 scope.go:117] "RemoveContainer" containerID="510ef3a11cb27425c86a50c66f257e70cc99033c042c1e9fefa2128ab5419c25" Dec 04 19:00:40 crc kubenswrapper[4631]: I1204 19:00:40.400545 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bxlzl"] Dec 04 19:00:40 crc kubenswrapper[4631]: E1204 19:00:40.401476 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7229821e-7b60-42a9-badc-fbc07bd829e7" containerName="collect-profiles" Dec 04 19:00:40 crc kubenswrapper[4631]: I1204 19:00:40.401492 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="7229821e-7b60-42a9-badc-fbc07bd829e7" containerName="collect-profiles" Dec 04 19:00:40 crc kubenswrapper[4631]: I1204 19:00:40.401690 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="7229821e-7b60-42a9-badc-fbc07bd829e7" containerName="collect-profiles" Dec 04 19:00:40 crc kubenswrapper[4631]: I1204 19:00:40.402953 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bxlzl" Dec 04 19:00:40 crc kubenswrapper[4631]: I1204 19:00:40.416739 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bxlzl"] Dec 04 19:00:40 crc kubenswrapper[4631]: I1204 19:00:40.512147 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlzkn\" (UniqueName: \"kubernetes.io/projected/a12b063e-82c3-43ac-aa9f-4d1d0201e076-kube-api-access-qlzkn\") pod \"community-operators-bxlzl\" (UID: \"a12b063e-82c3-43ac-aa9f-4d1d0201e076\") " pod="openshift-marketplace/community-operators-bxlzl" Dec 04 19:00:40 crc kubenswrapper[4631]: I1204 19:00:40.512285 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a12b063e-82c3-43ac-aa9f-4d1d0201e076-catalog-content\") pod \"community-operators-bxlzl\" (UID: \"a12b063e-82c3-43ac-aa9f-4d1d0201e076\") " pod="openshift-marketplace/community-operators-bxlzl" Dec 04 19:00:40 crc kubenswrapper[4631]: I1204 19:00:40.512387 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a12b063e-82c3-43ac-aa9f-4d1d0201e076-utilities\") pod \"community-operators-bxlzl\" (UID: \"a12b063e-82c3-43ac-aa9f-4d1d0201e076\") " pod="openshift-marketplace/community-operators-bxlzl" Dec 04 19:00:40 crc kubenswrapper[4631]: I1204 19:00:40.613480 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a12b063e-82c3-43ac-aa9f-4d1d0201e076-catalog-content\") pod \"community-operators-bxlzl\" (UID: \"a12b063e-82c3-43ac-aa9f-4d1d0201e076\") " pod="openshift-marketplace/community-operators-bxlzl" Dec 04 19:00:40 crc kubenswrapper[4631]: I1204 19:00:40.613566 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a12b063e-82c3-43ac-aa9f-4d1d0201e076-utilities\") pod \"community-operators-bxlzl\" (UID: \"a12b063e-82c3-43ac-aa9f-4d1d0201e076\") " pod="openshift-marketplace/community-operators-bxlzl" Dec 04 19:00:40 crc kubenswrapper[4631]: I1204 19:00:40.613608 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlzkn\" (UniqueName: \"kubernetes.io/projected/a12b063e-82c3-43ac-aa9f-4d1d0201e076-kube-api-access-qlzkn\") pod \"community-operators-bxlzl\" (UID: \"a12b063e-82c3-43ac-aa9f-4d1d0201e076\") " pod="openshift-marketplace/community-operators-bxlzl" Dec 04 19:00:40 crc kubenswrapper[4631]: I1204 19:00:40.613970 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a12b063e-82c3-43ac-aa9f-4d1d0201e076-catalog-content\") pod \"community-operators-bxlzl\" (UID: \"a12b063e-82c3-43ac-aa9f-4d1d0201e076\") " pod="openshift-marketplace/community-operators-bxlzl" Dec 04 19:00:40 crc kubenswrapper[4631]: I1204 19:00:40.614052 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a12b063e-82c3-43ac-aa9f-4d1d0201e076-utilities\") pod \"community-operators-bxlzl\" (UID: \"a12b063e-82c3-43ac-aa9f-4d1d0201e076\") " pod="openshift-marketplace/community-operators-bxlzl" Dec 04 19:00:40 crc kubenswrapper[4631]: I1204 19:00:40.641680 4631 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-qlzkn\" (UniqueName: \"kubernetes.io/projected/a12b063e-82c3-43ac-aa9f-4d1d0201e076-kube-api-access-qlzkn\") pod \"community-operators-bxlzl\" (UID: \"a12b063e-82c3-43ac-aa9f-4d1d0201e076\") " pod="openshift-marketplace/community-operators-bxlzl" Dec 04 19:00:40 crc kubenswrapper[4631]: I1204 19:00:40.771348 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bxlzl" Dec 04 19:00:41 crc kubenswrapper[4631]: I1204 19:00:41.290274 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bxlzl"] Dec 04 19:00:41 crc kubenswrapper[4631]: W1204 19:00:41.302855 4631 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda12b063e_82c3_43ac_aa9f_4d1d0201e076.slice/crio-dd4c97d1be2f829a6b16872ed0335643ea78c9cd606ec3c752c5111bfa29255b WatchSource:0}: Error finding container dd4c97d1be2f829a6b16872ed0335643ea78c9cd606ec3c752c5111bfa29255b: Status 404 returned error can't find the container with id dd4c97d1be2f829a6b16872ed0335643ea78c9cd606ec3c752c5111bfa29255b Dec 04 19:00:41 crc kubenswrapper[4631]: I1204 19:00:41.744930 4631 generic.go:334] "Generic (PLEG): container finished" podID="a12b063e-82c3-43ac-aa9f-4d1d0201e076" containerID="14ac533d483f3dc843bdde26ba0b835c4a007e5199c0abda9dad51a192d15ef3" exitCode=0 Dec 04 19:00:41 crc kubenswrapper[4631]: I1204 19:00:41.744980 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bxlzl" event={"ID":"a12b063e-82c3-43ac-aa9f-4d1d0201e076","Type":"ContainerDied","Data":"14ac533d483f3dc843bdde26ba0b835c4a007e5199c0abda9dad51a192d15ef3"} Dec 04 19:00:41 crc kubenswrapper[4631]: I1204 19:00:41.745005 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bxlzl" event={"ID":"a12b063e-82c3-43ac-aa9f-4d1d0201e076","Type":"ContainerStarted","Data":"dd4c97d1be2f829a6b16872ed0335643ea78c9cd606ec3c752c5111bfa29255b"} Dec 04 19:00:41 crc kubenswrapper[4631]: I1204 19:00:41.747135 4631 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 04 19:00:42 crc kubenswrapper[4631]: I1204 19:00:42.755318 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bxlzl" event={"ID":"a12b063e-82c3-43ac-aa9f-4d1d0201e076","Type":"ContainerStarted","Data":"54ae264d2e53c75ac999ff99a057be95f09ee2faa217a7019c36a8636e9ef764"} Dec 04 19:00:43 crc kubenswrapper[4631]: I1204 19:00:43.765474 4631 generic.go:334] "Generic (PLEG): container finished" podID="a12b063e-82c3-43ac-aa9f-4d1d0201e076" containerID="54ae264d2e53c75ac999ff99a057be95f09ee2faa217a7019c36a8636e9ef764" exitCode=0 Dec 04 19:00:43 crc kubenswrapper[4631]: I1204 19:00:43.765516 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bxlzl" event={"ID":"a12b063e-82c3-43ac-aa9f-4d1d0201e076","Type":"ContainerDied","Data":"54ae264d2e53c75ac999ff99a057be95f09ee2faa217a7019c36a8636e9ef764"} Dec 04 19:00:44 crc kubenswrapper[4631]: I1204 19:00:44.774775 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bxlzl" event={"ID":"a12b063e-82c3-43ac-aa9f-4d1d0201e076","Type":"ContainerStarted","Data":"6515ffd1b7735c352ae594b6e8ab018d4ac2fd02d0bceead39eb81d4c9d917a9"} Dec 04 19:00:44 crc kubenswrapper[4631]: I1204 
19:00:44.799860 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bxlzl" podStartSLOduration=2.394740378 podStartE2EDuration="4.799840016s" podCreationTimestamp="2025-12-04 19:00:40 +0000 UTC" firstStartedPulling="2025-12-04 19:00:41.746921537 +0000 UTC m=+5571.779163535" lastFinishedPulling="2025-12-04 19:00:44.152021165 +0000 UTC m=+5574.184263173" observedRunningTime="2025-12-04 19:00:44.793707342 +0000 UTC m=+5574.825949350" watchObservedRunningTime="2025-12-04 19:00:44.799840016 +0000 UTC m=+5574.832082034" Dec 04 19:00:49 crc kubenswrapper[4631]: I1204 19:00:49.496154 4631 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-q47hz"] Dec 04 19:00:49 crc kubenswrapper[4631]: I1204 19:00:49.502493 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q47hz" Dec 04 19:00:49 crc kubenswrapper[4631]: I1204 19:00:49.508499 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q47hz"] Dec 04 19:00:49 crc kubenswrapper[4631]: I1204 19:00:49.574176 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7wb7\" (UniqueName: \"kubernetes.io/projected/66cc7428-b9b9-4c8c-9c05-65523a2f022f-kube-api-access-z7wb7\") pod \"redhat-marketplace-q47hz\" (UID: \"66cc7428-b9b9-4c8c-9c05-65523a2f022f\") " pod="openshift-marketplace/redhat-marketplace-q47hz" Dec 04 19:00:49 crc kubenswrapper[4631]: I1204 19:00:49.574263 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66cc7428-b9b9-4c8c-9c05-65523a2f022f-catalog-content\") pod \"redhat-marketplace-q47hz\" (UID: \"66cc7428-b9b9-4c8c-9c05-65523a2f022f\") " pod="openshift-marketplace/redhat-marketplace-q47hz" Dec 04 19:00:49 crc kubenswrapper[4631]: I1204 19:00:49.574301 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66cc7428-b9b9-4c8c-9c05-65523a2f022f-utilities\") pod \"redhat-marketplace-q47hz\" (UID: \"66cc7428-b9b9-4c8c-9c05-65523a2f022f\") " pod="openshift-marketplace/redhat-marketplace-q47hz" Dec 04 19:00:49 crc kubenswrapper[4631]: I1204 19:00:49.675653 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66cc7428-b9b9-4c8c-9c05-65523a2f022f-catalog-content\") pod \"redhat-marketplace-q47hz\" (UID: \"66cc7428-b9b9-4c8c-9c05-65523a2f022f\") " pod="openshift-marketplace/redhat-marketplace-q47hz" Dec 04 19:00:49 crc kubenswrapper[4631]: I1204 19:00:49.675719 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66cc7428-b9b9-4c8c-9c05-65523a2f022f-utilities\") pod \"redhat-marketplace-q47hz\" (UID: \"66cc7428-b9b9-4c8c-9c05-65523a2f022f\") " pod="openshift-marketplace/redhat-marketplace-q47hz" Dec 04 19:00:49 crc kubenswrapper[4631]: I1204 19:00:49.675883 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7wb7\" (UniqueName: \"kubernetes.io/projected/66cc7428-b9b9-4c8c-9c05-65523a2f022f-kube-api-access-z7wb7\") pod \"redhat-marketplace-q47hz\" (UID: \"66cc7428-b9b9-4c8c-9c05-65523a2f022f\") " pod="openshift-marketplace/redhat-marketplace-q47hz" Dec 04 
19:00:49 crc kubenswrapper[4631]: I1204 19:00:49.676170 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66cc7428-b9b9-4c8c-9c05-65523a2f022f-catalog-content\") pod \"redhat-marketplace-q47hz\" (UID: \"66cc7428-b9b9-4c8c-9c05-65523a2f022f\") " pod="openshift-marketplace/redhat-marketplace-q47hz" Dec 04 19:00:49 crc kubenswrapper[4631]: I1204 19:00:49.676293 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66cc7428-b9b9-4c8c-9c05-65523a2f022f-utilities\") pod \"redhat-marketplace-q47hz\" (UID: \"66cc7428-b9b9-4c8c-9c05-65523a2f022f\") " pod="openshift-marketplace/redhat-marketplace-q47hz" Dec 04 19:00:49 crc kubenswrapper[4631]: I1204 19:00:49.696464 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7wb7\" (UniqueName: \"kubernetes.io/projected/66cc7428-b9b9-4c8c-9c05-65523a2f022f-kube-api-access-z7wb7\") pod \"redhat-marketplace-q47hz\" (UID: \"66cc7428-b9b9-4c8c-9c05-65523a2f022f\") " pod="openshift-marketplace/redhat-marketplace-q47hz" Dec 04 19:00:49 crc kubenswrapper[4631]: I1204 19:00:49.830302 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q47hz" Dec 04 19:00:50 crc kubenswrapper[4631]: I1204 19:00:50.306669 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q47hz"] Dec 04 19:00:50 crc kubenswrapper[4631]: I1204 19:00:50.772054 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bxlzl" Dec 04 19:00:50 crc kubenswrapper[4631]: I1204 19:00:50.772484 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bxlzl" Dec 04 19:00:50 crc kubenswrapper[4631]: I1204 19:00:50.828300 4631 generic.go:334] "Generic (PLEG): container finished" podID="66cc7428-b9b9-4c8c-9c05-65523a2f022f" containerID="a910d9ddb91f2492ad474d5c3aa6693802c5cae4c3f8e7add88b37c445ea0f31" exitCode=0 Dec 04 19:00:50 crc kubenswrapper[4631]: I1204 19:00:50.828420 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q47hz" event={"ID":"66cc7428-b9b9-4c8c-9c05-65523a2f022f","Type":"ContainerDied","Data":"a910d9ddb91f2492ad474d5c3aa6693802c5cae4c3f8e7add88b37c445ea0f31"} Dec 04 19:00:50 crc kubenswrapper[4631]: I1204 19:00:50.828502 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q47hz" event={"ID":"66cc7428-b9b9-4c8c-9c05-65523a2f022f","Type":"ContainerStarted","Data":"e5545bc7892821933303abb3c2aef0d1b5742438bb82512939b2717c38320769"} Dec 04 19:00:50 crc kubenswrapper[4631]: I1204 19:00:50.862416 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bxlzl" Dec 04 19:00:50 crc kubenswrapper[4631]: I1204 19:00:50.932659 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bxlzl" Dec 04 19:00:51 crc kubenswrapper[4631]: I1204 19:00:51.840022 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q47hz" event={"ID":"66cc7428-b9b9-4c8c-9c05-65523a2f022f","Type":"ContainerStarted","Data":"45448793bb69f53a7bd7cad3a7bf84a3de8bec60b60238bb78970c4b4f3dbcb0"} Dec 04 19:00:52 crc kubenswrapper[4631]: I1204 
19:00:52.854122 4631 generic.go:334] "Generic (PLEG): container finished" podID="66cc7428-b9b9-4c8c-9c05-65523a2f022f" containerID="45448793bb69f53a7bd7cad3a7bf84a3de8bec60b60238bb78970c4b4f3dbcb0" exitCode=0 Dec 04 19:00:52 crc kubenswrapper[4631]: I1204 19:00:52.854343 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q47hz" event={"ID":"66cc7428-b9b9-4c8c-9c05-65523a2f022f","Type":"ContainerDied","Data":"45448793bb69f53a7bd7cad3a7bf84a3de8bec60b60238bb78970c4b4f3dbcb0"} Dec 04 19:00:53 crc kubenswrapper[4631]: I1204 19:00:53.274716 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bxlzl"] Dec 04 19:00:53 crc kubenswrapper[4631]: I1204 19:00:53.275214 4631 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bxlzl" podUID="a12b063e-82c3-43ac-aa9f-4d1d0201e076" containerName="registry-server" containerID="cri-o://6515ffd1b7735c352ae594b6e8ab018d4ac2fd02d0bceead39eb81d4c9d917a9" gracePeriod=2 Dec 04 19:00:53 crc kubenswrapper[4631]: I1204 19:00:53.867284 4631 generic.go:334] "Generic (PLEG): container finished" podID="a12b063e-82c3-43ac-aa9f-4d1d0201e076" containerID="6515ffd1b7735c352ae594b6e8ab018d4ac2fd02d0bceead39eb81d4c9d917a9" exitCode=0 Dec 04 19:00:53 crc kubenswrapper[4631]: I1204 19:00:53.867343 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bxlzl" event={"ID":"a12b063e-82c3-43ac-aa9f-4d1d0201e076","Type":"ContainerDied","Data":"6515ffd1b7735c352ae594b6e8ab018d4ac2fd02d0bceead39eb81d4c9d917a9"} Dec 04 19:00:53 crc kubenswrapper[4631]: I1204 19:00:53.869592 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q47hz" event={"ID":"66cc7428-b9b9-4c8c-9c05-65523a2f022f","Type":"ContainerStarted","Data":"fa64e437bf2a227e250f55bbdd47933fb8a743241ea62e718d993f324a9f1682"} Dec 04 19:00:53 crc kubenswrapper[4631]: I1204 19:00:53.895825 4631 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-q47hz" podStartSLOduration=2.479300907 podStartE2EDuration="4.89580336s" podCreationTimestamp="2025-12-04 19:00:49 +0000 UTC" firstStartedPulling="2025-12-04 19:00:50.83093077 +0000 UTC m=+5580.863172768" lastFinishedPulling="2025-12-04 19:00:53.247433183 +0000 UTC m=+5583.279675221" observedRunningTime="2025-12-04 19:00:53.885182458 +0000 UTC m=+5583.917424456" watchObservedRunningTime="2025-12-04 19:00:53.89580336 +0000 UTC m=+5583.928045378" Dec 04 19:00:54 crc kubenswrapper[4631]: I1204 19:00:54.298853 4631 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bxlzl" Dec 04 19:00:54 crc kubenswrapper[4631]: I1204 19:00:54.425127 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qlzkn\" (UniqueName: \"kubernetes.io/projected/a12b063e-82c3-43ac-aa9f-4d1d0201e076-kube-api-access-qlzkn\") pod \"a12b063e-82c3-43ac-aa9f-4d1d0201e076\" (UID: \"a12b063e-82c3-43ac-aa9f-4d1d0201e076\") " Dec 04 19:00:54 crc kubenswrapper[4631]: I1204 19:00:54.425448 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a12b063e-82c3-43ac-aa9f-4d1d0201e076-catalog-content\") pod \"a12b063e-82c3-43ac-aa9f-4d1d0201e076\" (UID: \"a12b063e-82c3-43ac-aa9f-4d1d0201e076\") " Dec 04 19:00:54 crc kubenswrapper[4631]: I1204 19:00:54.425620 4631 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a12b063e-82c3-43ac-aa9f-4d1d0201e076-utilities\") pod \"a12b063e-82c3-43ac-aa9f-4d1d0201e076\" (UID: \"a12b063e-82c3-43ac-aa9f-4d1d0201e076\") " Dec 04 19:00:54 crc kubenswrapper[4631]: I1204 19:00:54.428980 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a12b063e-82c3-43ac-aa9f-4d1d0201e076-utilities" (OuterVolumeSpecName: "utilities") pod "a12b063e-82c3-43ac-aa9f-4d1d0201e076" (UID: "a12b063e-82c3-43ac-aa9f-4d1d0201e076"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 19:00:54 crc kubenswrapper[4631]: I1204 19:00:54.433254 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a12b063e-82c3-43ac-aa9f-4d1d0201e076-kube-api-access-qlzkn" (OuterVolumeSpecName: "kube-api-access-qlzkn") pod "a12b063e-82c3-43ac-aa9f-4d1d0201e076" (UID: "a12b063e-82c3-43ac-aa9f-4d1d0201e076"). InnerVolumeSpecName "kube-api-access-qlzkn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 04 19:00:54 crc kubenswrapper[4631]: I1204 19:00:54.472085 4631 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a12b063e-82c3-43ac-aa9f-4d1d0201e076-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a12b063e-82c3-43ac-aa9f-4d1d0201e076" (UID: "a12b063e-82c3-43ac-aa9f-4d1d0201e076"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 04 19:00:54 crc kubenswrapper[4631]: I1204 19:00:54.527902 4631 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qlzkn\" (UniqueName: \"kubernetes.io/projected/a12b063e-82c3-43ac-aa9f-4d1d0201e076-kube-api-access-qlzkn\") on node \"crc\" DevicePath \"\"" Dec 04 19:00:54 crc kubenswrapper[4631]: I1204 19:00:54.527925 4631 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a12b063e-82c3-43ac-aa9f-4d1d0201e076-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 04 19:00:54 crc kubenswrapper[4631]: I1204 19:00:54.527943 4631 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a12b063e-82c3-43ac-aa9f-4d1d0201e076-utilities\") on node \"crc\" DevicePath \"\"" Dec 04 19:00:54 crc kubenswrapper[4631]: I1204 19:00:54.879748 4631 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bxlzl" event={"ID":"a12b063e-82c3-43ac-aa9f-4d1d0201e076","Type":"ContainerDied","Data":"dd4c97d1be2f829a6b16872ed0335643ea78c9cd606ec3c752c5111bfa29255b"} Dec 04 19:00:54 crc kubenswrapper[4631]: I1204 19:00:54.879813 4631 scope.go:117] "RemoveContainer" containerID="6515ffd1b7735c352ae594b6e8ab018d4ac2fd02d0bceead39eb81d4c9d917a9" Dec 04 19:00:54 crc kubenswrapper[4631]: I1204 19:00:54.880380 4631 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bxlzl" Dec 04 19:00:54 crc kubenswrapper[4631]: I1204 19:00:54.898715 4631 scope.go:117] "RemoveContainer" containerID="54ae264d2e53c75ac999ff99a057be95f09ee2faa217a7019c36a8636e9ef764" Dec 04 19:00:54 crc kubenswrapper[4631]: I1204 19:00:54.918150 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bxlzl"] Dec 04 19:00:54 crc kubenswrapper[4631]: I1204 19:00:54.926388 4631 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bxlzl"] Dec 04 19:00:54 crc kubenswrapper[4631]: I1204 19:00:54.931361 4631 scope.go:117] "RemoveContainer" containerID="14ac533d483f3dc843bdde26ba0b835c4a007e5199c0abda9dad51a192d15ef3" Dec 04 19:00:56 crc kubenswrapper[4631]: I1204 19:00:56.249638 4631 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a12b063e-82c3-43ac-aa9f-4d1d0201e076" path="/var/lib/kubelet/pods/a12b063e-82c3-43ac-aa9f-4d1d0201e076/volumes" Dec 04 19:00:59 crc kubenswrapper[4631]: I1204 19:00:59.830927 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-q47hz" Dec 04 19:00:59 crc kubenswrapper[4631]: I1204 19:00:59.832449 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-q47hz" Dec 04 19:00:59 crc kubenswrapper[4631]: I1204 19:00:59.873883 4631 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-q47hz" Dec 04 19:00:59 crc kubenswrapper[4631]: I1204 19:00:59.964766 4631 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-q47hz" Dec 04 19:01:00 crc kubenswrapper[4631]: I1204 19:01:00.110328 4631 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q47hz"] Dec 04 19:01:00 crc kubenswrapper[4631]: I1204 19:01:00.146445 4631 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/keystone-cron-29414581-g9nsp"] Dec 04 19:01:00 crc kubenswrapper[4631]: E1204 19:01:00.147004 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a12b063e-82c3-43ac-aa9f-4d1d0201e076" containerName="registry-server" Dec 04 19:01:00 crc kubenswrapper[4631]: I1204 19:01:00.147067 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a12b063e-82c3-43ac-aa9f-4d1d0201e076" containerName="registry-server" Dec 04 19:01:00 crc kubenswrapper[4631]: E1204 19:01:00.147143 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a12b063e-82c3-43ac-aa9f-4d1d0201e076" containerName="extract-content" Dec 04 19:01:00 crc kubenswrapper[4631]: I1204 19:01:00.147193 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a12b063e-82c3-43ac-aa9f-4d1d0201e076" containerName="extract-content" Dec 04 19:01:00 crc kubenswrapper[4631]: E1204 19:01:00.147251 4631 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a12b063e-82c3-43ac-aa9f-4d1d0201e076" containerName="extract-utilities" Dec 04 19:01:00 crc kubenswrapper[4631]: I1204 19:01:00.147305 4631 state_mem.go:107] "Deleted CPUSet assignment" podUID="a12b063e-82c3-43ac-aa9f-4d1d0201e076" containerName="extract-utilities" Dec 04 19:01:00 crc kubenswrapper[4631]: I1204 19:01:00.147587 4631 memory_manager.go:354] "RemoveStaleState removing state" podUID="a12b063e-82c3-43ac-aa9f-4d1d0201e076" containerName="registry-server" Dec 04 19:01:00 crc kubenswrapper[4631]: I1204 19:01:00.148279 4631 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29414581-g9nsp" Dec 04 19:01:00 crc kubenswrapper[4631]: I1204 19:01:00.157735 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29414581-g9nsp"] Dec 04 19:01:00 crc kubenswrapper[4631]: I1204 19:01:00.339304 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pk5xs\" (UniqueName: \"kubernetes.io/projected/ab22e184-91b4-446f-b2e2-29defe3ff835-kube-api-access-pk5xs\") pod \"keystone-cron-29414581-g9nsp\" (UID: \"ab22e184-91b4-446f-b2e2-29defe3ff835\") " pod="openstack/keystone-cron-29414581-g9nsp" Dec 04 19:01:00 crc kubenswrapper[4631]: I1204 19:01:00.339417 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab22e184-91b4-446f-b2e2-29defe3ff835-combined-ca-bundle\") pod \"keystone-cron-29414581-g9nsp\" (UID: \"ab22e184-91b4-446f-b2e2-29defe3ff835\") " pod="openstack/keystone-cron-29414581-g9nsp" Dec 04 19:01:00 crc kubenswrapper[4631]: I1204 19:01:00.339826 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab22e184-91b4-446f-b2e2-29defe3ff835-config-data\") pod \"keystone-cron-29414581-g9nsp\" (UID: \"ab22e184-91b4-446f-b2e2-29defe3ff835\") " pod="openstack/keystone-cron-29414581-g9nsp" Dec 04 19:01:00 crc kubenswrapper[4631]: I1204 19:01:00.340364 4631 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ab22e184-91b4-446f-b2e2-29defe3ff835-fernet-keys\") pod \"keystone-cron-29414581-g9nsp\" (UID: \"ab22e184-91b4-446f-b2e2-29defe3ff835\") " pod="openstack/keystone-cron-29414581-g9nsp" Dec 04 19:01:00 crc kubenswrapper[4631]: I1204 19:01:00.441615 4631 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab22e184-91b4-446f-b2e2-29defe3ff835-combined-ca-bundle\") pod \"keystone-cron-29414581-g9nsp\" (UID: \"ab22e184-91b4-446f-b2e2-29defe3ff835\") " pod="openstack/keystone-cron-29414581-g9nsp" Dec 04 19:01:00 crc kubenswrapper[4631]: I1204 19:01:00.441729 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab22e184-91b4-446f-b2e2-29defe3ff835-config-data\") pod \"keystone-cron-29414581-g9nsp\" (UID: \"ab22e184-91b4-446f-b2e2-29defe3ff835\") " pod="openstack/keystone-cron-29414581-g9nsp" Dec 04 19:01:00 crc kubenswrapper[4631]: I1204 19:01:00.441763 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ab22e184-91b4-446f-b2e2-29defe3ff835-fernet-keys\") pod \"keystone-cron-29414581-g9nsp\" (UID: \"ab22e184-91b4-446f-b2e2-29defe3ff835\") " pod="openstack/keystone-cron-29414581-g9nsp" Dec 04 19:01:00 crc kubenswrapper[4631]: I1204 19:01:00.441924 4631 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pk5xs\" (UniqueName: \"kubernetes.io/projected/ab22e184-91b4-446f-b2e2-29defe3ff835-kube-api-access-pk5xs\") pod \"keystone-cron-29414581-g9nsp\" (UID: \"ab22e184-91b4-446f-b2e2-29defe3ff835\") " pod="openstack/keystone-cron-29414581-g9nsp" Dec 04 19:01:00 crc kubenswrapper[4631]: I1204 19:01:00.447740 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab22e184-91b4-446f-b2e2-29defe3ff835-config-data\") pod \"keystone-cron-29414581-g9nsp\" (UID: \"ab22e184-91b4-446f-b2e2-29defe3ff835\") " pod="openstack/keystone-cron-29414581-g9nsp" Dec 04 19:01:00 crc kubenswrapper[4631]: I1204 19:01:00.447777 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab22e184-91b4-446f-b2e2-29defe3ff835-combined-ca-bundle\") pod \"keystone-cron-29414581-g9nsp\" (UID: \"ab22e184-91b4-446f-b2e2-29defe3ff835\") " pod="openstack/keystone-cron-29414581-g9nsp" Dec 04 19:01:00 crc kubenswrapper[4631]: I1204 19:01:00.448262 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ab22e184-91b4-446f-b2e2-29defe3ff835-fernet-keys\") pod \"keystone-cron-29414581-g9nsp\" (UID: \"ab22e184-91b4-446f-b2e2-29defe3ff835\") " pod="openstack/keystone-cron-29414581-g9nsp" Dec 04 19:01:00 crc kubenswrapper[4631]: I1204 19:01:00.464025 4631 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pk5xs\" (UniqueName: \"kubernetes.io/projected/ab22e184-91b4-446f-b2e2-29defe3ff835-kube-api-access-pk5xs\") pod \"keystone-cron-29414581-g9nsp\" (UID: \"ab22e184-91b4-446f-b2e2-29defe3ff835\") " pod="openstack/keystone-cron-29414581-g9nsp" Dec 04 19:01:00 crc kubenswrapper[4631]: I1204 19:01:00.466903 4631 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29414581-g9nsp" Dec 04 19:01:00 crc kubenswrapper[4631]: I1204 19:01:00.984981 4631 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29414581-g9nsp"] var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515114355163024451 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015114355164017367 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015114341561016506 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015114341562015457 5ustar corecore